This is an automated email from the ASF dual-hosted git repository.

chenliang613 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 448564aadc optimizeCodeSmellInSpark (#4328)
448564aadc is described below

commit 448564aadc2c9a1904b11346b6fc555e58e25493
Author: Bo Xu <[email protected]>
AuthorDate: Thu Oct 19 15:49:56 2023 +0800

    optimizeCodeSmellInSpark (#4328)
---
 .../vectorreader/VectorizedCarbonRecordReader.java     |  2 +-
 .../carbondata/stream/CarbonStreamRecordReader.java    |  1 +
 .../java/org/apache/spark/sql/CarbonVectorProxy.java   |  2 ++
 .../secondaryindex/jobs/CarbonBlockLoaderHelper.java   |  4 ++--
 .../secondaryindex/load/CarbonInternalLoaderUtil.java  | 18 +++++++++++-------
 .../spark/sql/secondaryindex/load/RowComparator.java   |  1 +
 .../carbondata/index/secondary/SecondaryIndex.java     |  6 +++---
 .../org/apache/spark/sql/index/CarbonIndexUtil.scala   |  2 +-
 .../sql/secondaryindex/command/SICreationCommand.scala |  2 +-
 9 files changed, 23 insertions(+), 15 deletions(-)

diff --git a/integration/spark/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java b/integration/spark/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
index 96d05422ad..5b85f78e59 100644
--- a/integration/spark/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
+++ b/integration/spark/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
@@ -101,7 +101,7 @@ public class VectorizedCarbonRecordReader extends AbstractRecordReader<Object> {
       String enableBatch) {
     this.queryModel = queryModel;
     this.inputMetricsStats = inputMetricsStats;
-    if (enableBatch.equals("true")) {
+    if ("true".equals(enableBatch)) {
       enableReturningBatches();
     }
   }
diff --git a/integration/spark/src/main/java/org/apache/carbondata/stream/CarbonStreamRecordReader.java b/integration/spark/src/main/java/org/apache/carbondata/stream/CarbonStreamRecordReader.java
index 5b5eab2703..515e362859 100644
--- a/integration/spark/src/main/java/org/apache/carbondata/stream/CarbonStreamRecordReader.java
+++ b/integration/spark/src/main/java/org/apache/carbondata/stream/CarbonStreamRecordReader.java
@@ -54,6 +54,7 @@ public class CarbonStreamRecordReader extends StreamRecordReader {
     this.inputMetricsStats = inputMetricsStats;
   }
 
+  @Override
   protected void initializeAtFirstRow() throws IOException {
     super.initializeAtFirstRow();
     outputRow = new GenericInternalRow(outputValues);
diff --git a/integration/spark/src/main/java/org/apache/spark/sql/CarbonVectorProxy.java b/integration/spark/src/main/java/org/apache/spark/sql/CarbonVectorProxy.java
index e96319370c..ab310bbca0 100644
--- a/integration/spark/src/main/java/org/apache/spark/sql/CarbonVectorProxy.java
+++ b/integration/spark/src/main/java/org/apache/spark/sql/CarbonVectorProxy.java
@@ -531,6 +531,7 @@ public class CarbonVectorProxy {
       return vector.getChild(i);
     }
 
+    @Override
     public void reset() {
       isLoaded = false;
       pageLoad = null;
@@ -546,6 +547,7 @@ public class CarbonVectorProxy {
       }
     }
 
+    @Override
     public void setLazyPage(LazyPageLoader lazyPage) {
       this.pageLoad = lazyPage;
     }
diff --git a/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/jobs/CarbonBlockLoaderHelper.java b/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/jobs/CarbonBlockLoaderHelper.java
index a5a6e8aa2d..59dfe0378b 100644
--- a/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/jobs/CarbonBlockLoaderHelper.java
+++ b/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/jobs/CarbonBlockLoaderHelper.java
@@ -29,7 +29,7 @@ import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
  */
 public class CarbonBlockLoaderHelper {
 
-  private static final CarbonBlockLoaderHelper carbonBlockLoaderHelper =
+  private static final CarbonBlockLoaderHelper CARBON_BLOCK_LOADER_HELPER =
       new CarbonBlockLoaderHelper();
   /**
    * maintains the map of segments already considered for the btree load
@@ -46,7 +46,7 @@ public class CarbonBlockLoaderHelper {
    * @return
    */
   public static CarbonBlockLoaderHelper getInstance() {
-    return carbonBlockLoaderHelper;
+    return CARBON_BLOCK_LOADER_HELPER;
   }
 
   private Set<String> getTableBlocks(AbsoluteTableIdentifier 
absoluteTableIdentifier) {
diff --git a/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java b/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java
index 77992208e8..cded70cefb 100644
--- a/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java
+++ b/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java
@@ -54,15 +54,19 @@ public class CarbonInternalLoaderUtil {
     List<String> activeSlices = new 
ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
     for (LoadMetadataDetails oneLoad : details) {
       // External added segments are not loaded to SI
-      if (oneLoad.getPath() == null && 
SegmentStatus.SUCCESS.equals(oneLoad.getSegmentStatus())
-          || 
SegmentStatus.LOAD_PARTIAL_SUCCESS.equals(oneLoad.getSegmentStatus())
-          || 
SegmentStatus.MARKED_FOR_UPDATE.equals(oneLoad.getSegmentStatus())) {
+      if (isaBoolean(oneLoad)) {
         activeSlices.add(oneLoad.getLoadName());
       }
     }
     return activeSlices;
   }
 
+  private static boolean isaBoolean(LoadMetadataDetails oneLoad) {
+    return oneLoad.getPath() == null && 
SegmentStatus.SUCCESS.equals(oneLoad.getSegmentStatus())
+        || 
SegmentStatus.LOAD_PARTIAL_SUCCESS.equals(oneLoad.getSegmentStatus())
+        || SegmentStatus.MARKED_FOR_UPDATE.equals(oneLoad.getSegmentStatus());
+  }
+
   /**
    * This method will return the mapping of valid segments to segment load 
start time
    */
@@ -340,17 +344,17 @@ public class CarbonInternalLoaderUtil {
     return tableStatusUpdateStatus;
   }
 
-  public static boolean checkMainTableSegEqualToSISeg(
+  public static boolean checkMainTableSegEqualToSiSeg(
       LoadMetadataDetails[] mainTableLoadMetadataDetails,
       LoadMetadataDetails[] siTableLoadMetadataDetails) throws ErrorMessage {
-    return checkMainTableSegEqualToSISeg(mainTableLoadMetadataDetails, 
siTableLoadMetadataDetails,
+    return checkMainTableSegEqualToSiSeg(mainTableLoadMetadataDetails, 
siTableLoadMetadataDetails,
         false);
   }
 
   /**
    * Method to check if main table and SI have same number of valid segments 
or not
    */
-  public static boolean checkMainTableSegEqualToSISeg(
+  public static boolean checkMainTableSegEqualToSiSeg(
       LoadMetadataDetails[] mainTableLoadMetadataDetails,
       LoadMetadataDetails[] siTableLoadMetadataDetails, boolean 
isRegisterIndex)
       throws ErrorMessage {
@@ -385,7 +389,7 @@ public class CarbonInternalLoaderUtil {
   /**
    * Method to check if main table has in progress load and same segment not 
present in SI
    */
-  public static boolean checkInProgLoadInMainTableAndSI(CarbonTable 
carbonTable,
+  public static boolean checkInProgLoadInMainTableAndSi(CarbonTable 
carbonTable,
       LoadMetadataDetails[] mainTableLoadMetadataDetails,
       LoadMetadataDetails[] siTableLoadMetadataDetails) {
     List<String> allSiSlices = new 
ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
diff --git a/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/RowComparator.java b/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/RowComparator.java
index 0892b2bfe7..9e9be7ac12 100644
--- a/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/RowComparator.java
+++ b/integration/spark/src/main/java/org/apache/spark/sql/secondaryindex/load/RowComparator.java
@@ -44,6 +44,7 @@ public class RowComparator implements Comparator<Object[]> {
   /**
    * Below method will be used to compare two MDKeys
    */
+  @Override
   public int compare(Object[] rowA, Object[] rowB) {
     int diff = 0;
     int index = 0;
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/index/secondary/SecondaryIndex.java b/integration/spark/src/main/scala/org/apache/carbondata/index/secondary/SecondaryIndex.java
index 1cdc02384d..e0229c2ecd 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/index/secondary/SecondaryIndex.java
+++ b/integration/spark/src/main/scala/org/apache/carbondata/index/secondary/SecondaryIndex.java
@@ -72,15 +72,15 @@ public class SecondaryIndex extends CoarseGrainIndex {
   public void validateSegmentList(String indexPath, String tableStatusVersion) 
{
     LoadMetadataDetails[] loadMetadataDetails = SegmentStatusManager
         .readLoadMetadata(CarbonTablePath.getMetadataPath(indexPath), 
tableStatusVersion);
-    Set<String> validSISegments = new HashSet<>();
+    Set<String> validSiSegments = new HashSet<>();
     for (LoadMetadataDetails loadMetadataDetail : loadMetadataDetails) {
       if (loadMetadataDetail.getSegmentStatus() == SegmentStatus.SUCCESS
           || loadMetadataDetail.getSegmentStatus() == 
SegmentStatus.MARKED_FOR_UPDATE
           || loadMetadataDetail.getSegmentStatus() == 
SegmentStatus.LOAD_PARTIAL_SUCCESS) {
-        validSISegments.add(loadMetadataDetail.getLoadName());
+        validSiSegments.add(loadMetadataDetail.getLoadName());
       }
     }
-    validSegmentIds = Sets.intersection(validSISegments, validSegmentIds);
+    validSegmentIds = Sets.intersection(validSiSegments, validSegmentIds);
   }
 
   private Set<String> getPositionReferences(String databaseName, String 
indexName,
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/index/CarbonIndexUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/index/CarbonIndexUtil.scala
index 5c55e94b31..ed841943ef 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/index/CarbonIndexUtil.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/index/CarbonIndexUtil.scala
@@ -536,7 +536,7 @@ object CarbonIndexUtil {
         val siTblLoadMetadataDetails: Array[LoadMetadataDetails] =
           SegmentStatusManager.readLoadMetadata(indexTable.getMetadataPath,
             indexTable.getTableStatusVersion)
-        if (!CarbonInternalLoaderUtil.checkMainTableSegEqualToSISeg(
+        if (!CarbonInternalLoaderUtil.checkMainTableSegEqualToSiSeg(
           mainTableDetails,
           siTblLoadMetadataDetails)) {
           val indexColumns = 
indexMetadata.getIndexColumns(secondaryIndexProvider,
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala
index 06e1b51c0b..07d868cc80 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala
@@ -263,7 +263,7 @@ private[sql] case class CarbonCreateSecondaryIndexCommand(
       if (isRegisterIndex) {
         // check if SI segments are more than main table segments
         CarbonInternalLoaderUtil
-          .checkMainTableSegEqualToSISeg(mainTblLoadMetadataDetails,
+          .checkMainTableSegEqualToSiSeg(mainTblLoadMetadataDetails,
             siTblLoadMetadataDetails, isRegisterIndex)
         // check if SI table has undergone any Update or delete operation, 
which can happen in
         // case of compatibility scenario. IUD after Refresh SI and before 
register index

Reply via email to