KnightChess commented on code in PR #5564:
URL: https://github.com/apache/hudi/pull/5564#discussion_r872506269


##########
hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/DropHoodieTableCommand.scala:
##########
@@ -88,42 +86,33 @@ extends HoodieLeafRunnableCommand {
     }
   }
 
-  private def dropHiveDataSourceTable(
-       sparkSession: SparkSession,
-       hoodieCatalogTable: HoodieCatalogTable): Unit = {
-    val table = hoodieCatalogTable.table
-    val dbName = table.identifier.database.get
-    val tableName = hoodieCatalogTable.tableName
-
-    // check database exists
-    val dbExists = sparkSession.sessionState.catalog.databaseExists(dbName)
-    if (!dbExists) {
-      throw new NoSuchDatabaseException(dbName)
-    }
-
-    if (HoodieTableType.MERGE_ON_READ == hoodieCatalogTable.tableType && 
purge) {
-      val snapshotTableName = tableName + MOR_SNAPSHOT_TABLE_SUFFIX
-      val roTableName = tableName + MOR_READ_OPTIMIZED_TABLE_SUFFIX
-
-      dropHiveTable(sparkSession, dbName, snapshotTableName)
-      dropHiveTable(sparkSession, dbName, roTableName)
+  private def getTableRTAndRO(catalog: SessionCatalog,
+      hoodieTable: HoodieCatalogTable): (Option[CatalogTable], 
Option[CatalogTable]) = {
+    val rtIdt = hoodieTable.table.identifier.copy(
+      table = s"${hoodieTable.tableName}${MOR_SNAPSHOT_TABLE_SUFFIX}")
+    val roIdt = hoodieTable.table.identifier.copy(
+      table = s"${hoodieTable.tableName}${MOR_READ_OPTIMIZED_TABLE_SUFFIX}")
+
+    var rtTableOpt: Option[CatalogTable] = None
+    var roTableOpt: Option[CatalogTable] = None
+    if (catalog.tableExists(roIdt)) {
+      val rtTable = catalog.getTableMetadata(rtIdt)
+      if 
(rtTable.storage.locationUri.equals(hoodieTable.table.storage.locationUri)) {
+        rtTable.properties.get(ConfigUtils.IS_QUERY_AS_RO_TABLE) match {

Review Comment:
   HiveSyncTool sets this param as a serde property; I think the CatalogTable
properties will not contain this param.



##########
hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/DropHoodieTableCommand.scala:
##########
@@ -88,42 +86,33 @@ extends HoodieLeafRunnableCommand {
     }
   }
 
-  private def dropHiveDataSourceTable(
-       sparkSession: SparkSession,
-       hoodieCatalogTable: HoodieCatalogTable): Unit = {
-    val table = hoodieCatalogTable.table
-    val dbName = table.identifier.database.get
-    val tableName = hoodieCatalogTable.tableName
-
-    // check database exists
-    val dbExists = sparkSession.sessionState.catalog.databaseExists(dbName)
-    if (!dbExists) {
-      throw new NoSuchDatabaseException(dbName)
-    }
-
-    if (HoodieTableType.MERGE_ON_READ == hoodieCatalogTable.tableType && 
purge) {
-      val snapshotTableName = tableName + MOR_SNAPSHOT_TABLE_SUFFIX
-      val roTableName = tableName + MOR_READ_OPTIMIZED_TABLE_SUFFIX
-
-      dropHiveTable(sparkSession, dbName, snapshotTableName)
-      dropHiveTable(sparkSession, dbName, roTableName)
+  private def getTableRTAndRO(catalog: SessionCatalog,
+      hoodieTable: HoodieCatalogTable): (Option[CatalogTable], 
Option[CatalogTable]) = {
+    val rtIdt = hoodieTable.table.identifier.copy(
+      table = s"${hoodieTable.tableName}${MOR_SNAPSHOT_TABLE_SUFFIX}")
+    val roIdt = hoodieTable.table.identifier.copy(
+      table = s"${hoodieTable.tableName}${MOR_READ_OPTIMIZED_TABLE_SUFFIX}")
+
+    var rtTableOpt: Option[CatalogTable] = None
+    var roTableOpt: Option[CatalogTable] = None
+    if (catalog.tableExists(roIdt)) {

Review Comment:
   Should this check `rtIdt` instead of `roIdt`? The `getTableMetadata` call just below looks up `rtIdt`, so checking existence of `roIdt` here could throw if only the RT table exists.



##########
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestDropTable.scala:
##########
@@ -72,4 +72,154 @@ class TestDropTable extends HoodieSparkSqlTestBase {
       }
     }
   }
+
+  test("Test Drop RO & RT table by purging base table.") {
+    withTempDir { tmp =>
+      val tableName = generateTableName
+      spark.sql(
+        s"""
+           |create table $tableName (
+           |  id int,
+           |  name string,
+           |  ts long
+           |) using hudi
+           | location '${tmp.getCanonicalPath}/$tableName'
+           | tblproperties (
+           |  type = 'mor',
+           |  primaryKey = 'id',
+           |  preCombineField = 'ts'
+           | )
+       """.stripMargin)
+
+      spark.sql(
+        s"""
+           |create table ${tableName}_ro using hudi
+           | location '${tmp.getCanonicalPath}/$tableName'
+           | tblproperties (
+           |  type = 'mor',
+           |  primaryKey = 'id',
+           |  preCombineField = 'ts',
+           |  hoodie.query.as.ro.table='true'

Review Comment:
   This test case may not be able to mock the real HiveSyncTool behavior. Please make sure
**hoodie.query.as.ro.table** is correctly set in the serdeProperties (not just the table properties).



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to