nihal0107 commented on a change in pull request #4037:
URL: https://github.com/apache/carbondata/pull/4037#discussion_r543354758



##########
File path: 
integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CarbonIndexFileMergeTestCase.scala
##########
@@ -111,6 +111,32 @@ class CarbonIndexFileMergeTestCase
     checkAnswer(sql("""Select count(*) from nonindexmerge"""), rows)
   }
 
+  test("verify index file merge for SI") {

Review comment:
       removed

##########
File path: 
index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
##########
@@ -287,6 +294,65 @@ class CarbonDataFileMergeTestCaseOnSI
       CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE_DEFAULT)
   }
 
+  test("test verify data file merge when exception occurred in rebuild segment") {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE, "false")
+    sql("DROP TABLE IF EXISTS nonindexmerge")
+    sql(
+      """
+        | CREATE TABLE nonindexmerge(id INT, name STRING, city STRING, age INT)
+        | STORED AS carbondata
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
+      s"'GLOBAL_SORT_PARTITIONS'='100')")
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
+      s"'GLOBAL_SORT_PARTITIONS'='100')")
+    sql("CREATE INDEX nonindexmerge_index1 on table nonindexmerge (name) AS 'carbondata'")
+    // when merge data file will throw the exception
+    val mock1 = mockDataFileMerge()
+    val ex = intercept[RuntimeException] {
+      sql("REFRESH INDEX nonindexmerge_index1 ON TABLE nonindexmerge").collect()
+    }
+    mock1.tearDown()
+    assert(ex.getMessage.contains("An exception occurred while merging data files in SI"))
+    var df1 = sql("""Select * from nonindexmerge where name='n16000'""")
+      .queryExecution.sparkPlan
+    assert(isFilterPushedDownToSI(df1))
+    assert(getDataFileCount("nonindexmerge_index1", "0") == 100)
+    assert(getDataFileCount("nonindexmerge_index1", "1") == 100)
+    // not able to acquire lock on table
+    val mock2 = TestSecondaryIndexUtils.mockTableLock()
+    val exception = intercept[AnalysisException] {
+      sql("REFRESH INDEX nonindexmerge_index1 ON TABLE nonindexmerge").collect()
+    }
+    mock2.tearDown()
+    assert(exception.getMessage.contains("Table is already locked for compaction. " +
+      "Please try after some time."))
+    df1 = sql("""Select * from nonindexmerge where name='n16000'""")
+      .queryExecution.sparkPlan
+    assert(getDataFileCount("nonindexmerge_index1", "0") == 100)
+    assert(getDataFileCount("nonindexmerge_index1", "1") == 100)
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE,
+      CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE_DEFAULT)
+  }
+
+  def mockDataFileMerge(): MockUp[SecondaryIndexUtil.type] = {

Review comment:
       done




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to