This is an automated email from the ASF dual-hosted git repository.

codope pushed a commit to branch release-0.12.0
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit bf56e6fe24348e7d226c2bda8491207524231fd0
Author: shaoxiong.zhan <[email protected]>
AuthorDate: Thu Aug 4 09:41:10 2022 +0800

    [HUDI-4520] Support qualified table 'db.table' in call procedures (#6274)
---
 .../scala/org/apache/hudi/HoodieCLIUtils.scala     | 15 ++++++++++
 .../hudi/command/procedures/BaseProcedure.scala    |  4 ++-
 .../procedures/CommitsCompareProcedure.scala       |  3 +-
 .../procedures/ExportInstantsProcedure.scala       |  3 +-
 .../RollbackToInstantTimeProcedure.scala           |  3 +-
 .../procedures/ShowArchivedCommitsProcedure.scala  |  3 +-
 .../procedures/ShowCommitFilesProcedure.scala      |  3 +-
 .../procedures/ShowCommitPartitionsProcedure.scala |  3 +-
 .../procedures/ShowCommitWriteStatsProcedure.scala |  3 +-
 .../command/procedures/ShowCommitsProcedure.scala  |  3 +-
 .../hudi/TestNestedSchemaPruningOptimization.scala |  1 -
 ...re.scala => HoodieSparkProcedureTestBase.scala} | 32 ++--------------------
 .../hudi/procedure/TestBootstrapProcedure.scala    |  3 +-
 .../sql/hudi/procedure/TestCallProcedure.scala     |  3 +-
 .../sql/hudi/procedure/TestCleanProcedure.scala    |  4 +--
 .../hudi/procedure/TestClusteringProcedure.scala   |  3 +-
 .../sql/hudi/procedure/TestCommitsProcedure.scala  |  4 +--
 .../hudi/procedure/TestCompactionProcedure.scala   |  3 +-
 .../hudi/procedure/TestCopyToTableProcedure.scala  |  3 +-
 .../procedure/TestExportInstantsProcedure.scala    |  4 +--
 .../sql/hudi/procedure/TestFsViewProcedure.scala   |  4 +--
 .../procedure/TestHdfsParquetImportProcedure.scala |  3 +-
 .../procedure/TestHoodieLogFileProcedure.scala     |  4 +--
 .../sql/hudi/procedure/TestMetadataProcedure.scala |  4 +--
 .../sql/hudi/procedure/TestRepairsProcedure.scala  |  3 +-
 .../hudi/procedure/TestSavepointsProcedure.scala   |  4 +--
 .../procedure/TestShowFsPathDetailProcedure.scala  |  4 +--
 .../sql/hudi/procedure/TestStatsProcedure.scala    |  4 +--
 .../TestUpgradeOrDowngradeProcedure.scala          |  3 +-
 29 files changed, 54 insertions(+), 82 deletions(-)

diff --git 
a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala
 
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala
index 552e3cfc9b..0d3edd592d 100644
--- 
a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala
+++ 
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala
@@ -22,8 +22,11 @@ package org.apache.hudi
 import org.apache.hudi.avro.model.HoodieClusteringGroup
 import org.apache.hudi.client.SparkRDDWriteClient
 import org.apache.hudi.common.table.{HoodieTableMetaClient, 
TableSchemaResolver}
+import org.apache.spark.SparkException
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable
 import org.apache.spark.sql.hudi.HoodieSqlCommonUtils.withSparkConf
 
 import scala.collection.JavaConverters.{collectionAsScalaIterableConverter, 
mapAsJavaMapConverter}
@@ -57,4 +60,16 @@ object HoodieCLIUtils {
 
     partitionPaths.sorted.mkString(",")
   }
+
+  def getHoodieCatalogTable(sparkSession: SparkSession, table: String): 
HoodieCatalogTable = {
+    val seq: Seq[String] = table.split('.')
+    seq match {
+      case Seq(tableName) =>
+        HoodieCatalogTable(sparkSession, TableIdentifier(tableName))
+      case Seq(database, tableName) =>
+        HoodieCatalogTable(sparkSession, TableIdentifier(tableName, 
Some(database)))
+      case _ =>
+        throw new SparkException(s"Unsupported identifier $table")
+    }
+  }
 }
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala
index 99e488784c..70799d3dc1 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala
@@ -17,12 +17,14 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.client.SparkRDDWriteClient
 import org.apache.hudi.client.common.HoodieSparkEngineContext
 import org.apache.hudi.common.model.HoodieRecordPayload
 import org.apache.hudi.config.{HoodieIndexConfig, HoodieWriteConfig}
 import org.apache.hudi.exception.HoodieClusteringException
 import org.apache.hudi.index.HoodieIndex.IndexType
+import org.apache.spark.SparkException
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable
@@ -112,7 +114,7 @@ abstract class BaseProcedure extends Procedure {
 
   protected def getBasePath(tableName: Option[Any], tablePath: Option[Any] = 
Option.empty): String = {
     tableName.map(
-      t => HoodieCatalogTable(sparkSession, new 
TableIdentifier(t.asInstanceOf[String])).tableLocation)
+      t => HoodieCLIUtils.getHoodieCatalogTable(sparkSession, 
t.asInstanceOf[String]).tableLocation)
       .getOrElse(
         tablePath.map(p => p.asInstanceOf[String]).getOrElse(
           throw new HoodieClusteringException("Table name or table path must 
be given one"))
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala
index 86262dc302..9cb03bffc9 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.HoodieTimeline
 import org.apache.spark.sql.Row
@@ -47,7 +48,7 @@ class CommitsCompareProcedure() extends BaseProcedure with 
ProcedureBuilder {
     val table = getArgValueOrDefault(args, 
PARAMETERS(0)).get.asInstanceOf[String]
     val path = getArgValueOrDefault(args, 
PARAMETERS(1)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val source = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val target = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(path).build
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
index cf400dd6d5..114f4c4ee1 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.hudi.command.procedures
 import org.apache.avro.generic.GenericRecord
 import org.apache.avro.specific.SpecificData
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.avro.HoodieAvroUtils
 import org.apache.hudi.avro.model.HoodieArchivedMetaEntry
 import org.apache.hudi.common.fs.FSUtils
@@ -72,7 +73,7 @@ class ExportInstantsProcedure extends BaseProcedure with 
ProcedureBuilder with L
     val actions: String = getArgValueOrDefault(args, 
PARAMETERS(3)).get.asInstanceOf[String]
     val desc = getArgValueOrDefault(args, 
PARAMETERS(4)).get.asInstanceOf[Boolean]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val archivePath = new Path(basePath + "/.hoodie/.commits_.archive*")
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala
index f802e1e78b..1fcc665d61 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.HoodieTimeline
 import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion
@@ -49,7 +50,7 @@ class RollbackToInstantTimeProcedure extends BaseProcedure 
with ProcedureBuilder
     val table = getArgValueOrDefault(args, 
PARAMETERS(0)).get.asInstanceOf[String]
     val instantTime = getArgValueOrDefault(args, 
PARAMETERS(1)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val client = createHoodieClient(jsc, basePath)
     client.getConfig.setValue(ROLLBACK_USING_MARKERS_ENABLE, "false")
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
index 957dfbe8bf..a3c3ece293 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.HoodieCommitMetadata
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieActiveTimeline, 
HoodieDefaultTimeline, HoodieInstant}
@@ -82,7 +83,7 @@ class ShowArchivedCommitsProcedure(includeExtraMetadata: 
Boolean) extends BasePr
     var startTs = getArgValueOrDefault(args, 
PARAMETERS(2)).get.asInstanceOf[String]
     var endTs = getArgValueOrDefault(args, 
PARAMETERS(3)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
 
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
index d4581be7f4..53fcd072c3 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.{HoodieCommitMetadata, 
HoodieReplaceCommitMetadata, HoodieWriteStat}
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}
@@ -61,7 +62,7 @@ class ShowCommitFilesProcedure() extends BaseProcedure with 
ProcedureBuilder {
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
     val instantTime = getArgValueOrDefault(args, 
PARAMETERS(2)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val activeTimeline = metaClient.getActiveTimeline
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala
index d358f996f3..0a3945aee8 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.{HoodieCommitMetadata, 
HoodieReplaceCommitMetadata, HoodieWriteStat}
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}
@@ -60,7 +61,7 @@ class ShowCommitPartitionsProcedure() extends BaseProcedure 
with ProcedureBuilde
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
     val instantTime = getArgValueOrDefault(args, 
PARAMETERS(2)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val activeTimeline = metaClient.getActiveTimeline
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala
index 594d187699..4e3609b533 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.{HoodieCommitMetadata, 
HoodieReplaceCommitMetadata}
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}
@@ -55,7 +56,7 @@ class ShowCommitWriteStatsProcedure() extends BaseProcedure 
with ProcedureBuilde
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
     val instantTime = getArgValueOrDefault(args, 
PARAMETERS(2)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val activeTimeline = metaClient.getActiveTimeline
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
index 1dc395ad27..169acce887 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.HoodieCommitMetadata
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieDefaultTimeline, 
HoodieInstant}
@@ -78,7 +79,7 @@ class ShowCommitsProcedure(includeExtraMetadata: Boolean) 
extends BaseProcedure
     val table = getArgValueOrDefault(args, 
PARAMETERS(0)).get.asInstanceOf[String]
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new 
TableIdentifier(table))
+    val hoodieCatalogTable = 
HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = 
HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
 
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala
index f47ff6be1b..87d19d31d9 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.hudi
 
 import org.apache.hudi.{HoodieSparkUtils, SparkAdapterSupport}
-import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.{FileSourceScanExec, ProjectExec, 
RowDataSourceScanExec, SparkPlan}
 import org.apache.spark.sql.internal.SQLConf
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/HoodieSparkProcedureTestBase.scala
similarity index 50%
copy from 
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
copy to 
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/HoodieSparkProcedureTestBase.scala
index cd4e3a7ac6..eb1390e81c 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/HoodieSparkProcedureTestBase.scala
@@ -19,34 +19,8 @@ package org.apache.spark.sql.hudi.procedure
 
 import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
-class TestExportInstantsProcedure extends HoodieSparkSqlTestBase {
-
-  test("Test Call export_instants Procedure") {
-    withTempDir { tmp =>
-      val tableName = generateTableName
-      // create table
-      spark.sql(
-        s"""
-           |create table $tableName (
-           |  id int,
-           |  name string,
-           |  price double,
-           |  ts long
-           |) using hudi
-           | location '${tmp.getCanonicalPath}/$tableName'
-           | tblproperties (
-           |  primaryKey = 'id',
-           |  preCombineField = 'ts'
-           | )
-       """.stripMargin)
-
-      // insert data to table
-      spark.sql(s"insert into $tableName select 1, 'a1', 10, 1000")
-
-      val result = spark.sql(s"""call export_instants(table => '$tableName', 
local_folder => '${tmp.getCanonicalPath}/$tableName')""").limit(1).collect()
-      assertResult(1) {
-        result.length
-      }
-    }
+class HoodieSparkProcedureTestBase extends HoodieSparkSqlTestBase {
+  override def generateTableName: String = {
+    s"default.${super.generateTableName}"
   }
 }
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
index f1e15a88c2..bc02c0402b 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
@@ -21,13 +21,12 @@ import org.apache.hadoop.fs.Path
 import org.apache.hudi.common.model.HoodieTableType
 import org.apache.hudi.functional.TestBootstrap
 import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 import org.apache.spark.sql.{Dataset, Row}
 
 import java.time.Instant
 import java.util
 
-class TestBootstrapProcedure extends HoodieSparkSqlTestBase {
+class TestBootstrapProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_bootstrap Procedure") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
index 3bd7b01673..094b8b1aca 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
@@ -19,9 +19,8 @@ package org.apache.spark.sql.hudi.procedure
 
 import org.apache.hudi.common.model.IOType
 import org.apache.hudi.common.testutils.FileCreateUtils
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
-class TestCallProcedure extends HoodieSparkSqlTestBase {
+class TestCallProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call show_commits Procedure") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
index 316dccca52..f93be88fe0 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
@@ -19,9 +19,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestCleanProcedure extends HoodieSparkSqlTestBase {
+class TestCleanProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_clean Procedure by Table") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala
index df4d8c90e2..456f9c5066 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala
@@ -23,11 +23,10 @@ import org.apache.hadoop.fs.Path
 import org.apache.hudi.common.table.timeline.{HoodieActiveTimeline, 
HoodieInstant, HoodieTimeline}
 import org.apache.hudi.common.util.{Option => HOption}
 import org.apache.hudi.{HoodieCLIUtils, HoodieDataSourceHelpers}
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import scala.collection.JavaConverters.asScalaIteratorConverter
 
-class TestClusteringProcedure extends HoodieSparkSqlTestBase {
+class TestClusteringProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_clustering Procedure By Table") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
index 2539ff7c36..2840b22434 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestCommitsProcedure extends HoodieSparkSqlTestBase {
+class TestCommitsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call show_archived_commits Procedure") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala
index 39332d8591..e9d9d550d3 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala
@@ -20,9 +20,8 @@
 package org.apache.spark.sql.hudi.procedure
 
 import org.apache.hudi.common.table.timeline.HoodieInstant
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
-class TestCompactionProcedure extends HoodieSparkSqlTestBase {
+class TestCompactionProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_compaction Procedure by Table") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala
index 57025ab0b6..6866b62f37 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala
@@ -18,11 +18,10 @@
 package org.apache.spark.sql.hudi.procedure
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import java.util
 
-class TestCopyToTableProcedure extends HoodieSparkSqlTestBase {
+class TestCopyToTableProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call copy_to_table Procedure with default params") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
index cd4e3a7ac6..b6a83e64fa 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestExportInstantsProcedure extends HoodieSparkSqlTestBase {
+class TestExportInstantsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call export_instants Procedure") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala
index 69d08e37df..64da833b9d 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestFsViewProcedure extends HoodieSparkSqlTestBase {
+class TestFsViewProcedure extends HoodieSparkProcedureTestBase {
   test("Test Call show_fsview_all Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
index 1a4d3e2e91..ea83c828c5 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
@@ -29,7 +29,6 @@ import org.apache.parquet.hadoop.ParquetWriter
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 import org.junit.jupiter.api.Assertions.assertTrue
 
 import java.io.IOException
@@ -37,7 +36,7 @@ import java.util
 import java.util.Objects
 import java.util.concurrent.TimeUnit
 
-class TestHdfsParquetImportProcedure extends HoodieSparkSqlTestBase {
+class TestHdfsParquetImportProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call hdfs_parquet_import Procedure with insert operation") {
     withTempDir { tmp =>
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala
index 41954c8002..be6a2bb762 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala
@@ -19,9 +19,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestHoodieLogFileProcedure extends HoodieSparkSqlTestBase {
+class TestHoodieLogFileProcedure extends HoodieSparkProcedureTestBase {
   test("Test Call show_logfile_metadata Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
index 5a26aaa0cf..ba90fe3cb9 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestMetadataProcedure extends HoodieSparkSqlTestBase {
+class TestMetadataProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call delete_metadata_table Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala
index 587f7a4bdd..f6ce92b415 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala
@@ -29,14 +29,13 @@ import org.apache.hudi.common.table.view.HoodieTableFileSystemView
 import org.apache.hudi.common.testutils.{HoodieTestDataGenerator, SchemaTestUtil}
 import org.apache.hudi.testutils.HoodieSparkWriteableTestTable
 import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import java.io.IOException
 import java.net.URL
 import java.nio.file.{Files, Paths}
 import scala.collection.JavaConverters.asScalaIteratorConverter
 
-class TestRepairsProcedure extends HoodieSparkSqlTestBase {
+class TestRepairsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call repair_add_partition_meta Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
index 24036519cd..c0ca6735a3 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
+class TestSavepointsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call create_savepoint Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala
index 8940f7c4ee..cc6164f071 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestShowFsPathDetailProcedure extends HoodieSparkSqlTestBase {
+class TestShowFsPathDetailProcedure extends HoodieSparkProcedureTestBase {
   test("Test Call show_fs_path_detail Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
index ad0179b58b..f4dd8ad63b 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
@@ -19,9 +19,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestStatsProcedure extends HoodieSparkSqlTestBase {
+class TestStatsProcedure extends HoodieSparkProcedureTestBase {
   test("Test Call stats_wa Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
index a9a763c8fd..962f28fc42 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
@@ -21,11 +21,10 @@ import org.apache.hadoop.fs.Path
 import org.apache.hudi.common.config.HoodieConfig
 import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient, HoodieTableVersion}
 import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import java.io.IOException
 
-class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
+class TestUpgradeOrDowngradeProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call downgrade_table and upgrade_table Procedure") {
     withTempDir { tmp =>

Reply via email to