[CARBONDATA-3067] Add check for debug to avoid string concat

For bloomfilter datamap, it will log debug for each blocklet. If the
data is huge, there may be thousands of blocklets, so in this commit, we
will try to avoid unnecessary string concat if the debug level is not
enabled.

This closes #2889


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/3ee5de38
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/3ee5de38
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/3ee5de38

Branch: refs/heads/branch-1.5
Commit: 3ee5de380e031d9c455343b8e7a0b6a851c00f99
Parents: 2e5a4f5
Author: xuchuanyin <xuchuan...@hust.edu.cn>
Authored: Thu Nov 1 10:09:40 2018 +0800
Committer: ravipesala <ravi.pes...@gmail.com>
Committed: Wed Nov 21 22:39:53 2018 +0530

----------------------------------------------------------------------
 .../datamap/bloom/BloomCoarseGrainDataMap.java          | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ee5de38/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
----------------------------------------------------------------------
diff --git 
a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
 
b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
index 4ec215e..7a104fc 100644
--- 
a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
+++ 
b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
@@ -185,7 +185,9 @@ public class BloomCoarseGrainDataMap extends 
CoarseGrainDataMap {
     }
     for (BloomQueryModel bloomQueryModel : bloomQueryModels) {
       Set<Blocklet> tempHitBlockletsResult = new HashSet<>();
-      LOGGER.debug("prune blocklet for query: " + bloomQueryModel);
+      if (LOGGER.isDebugEnabled()) {
+        LOGGER.debug("prune blocklet for query: " + bloomQueryModel);
+      }
       BloomCacheKeyValue.CacheKey cacheKey = new BloomCacheKeyValue.CacheKey(
           this.indexPath.toString(), bloomQueryModel.columnName);
       BloomCacheKeyValue.CacheValue cacheValue = cache.get(cacheKey);
@@ -205,12 +207,14 @@ public class BloomCoarseGrainDataMap extends 
CoarseGrainDataMap {
           }
         }
         if (scanRequired) {
-          LOGGER.debug(String.format("BloomCoarseGrainDataMap: Need to scan -> 
blocklet#%s",
-              String.valueOf(bloomFilter.getBlockletNo())));
+          if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug(String.format("BloomCoarseGrainDataMap: Need to scan 
-> blocklet#%s",
+                String.valueOf(bloomFilter.getBlockletNo())));
+          }
           Blocklet blocklet = new Blocklet(bloomFilter.getShardName(),
               String.valueOf(bloomFilter.getBlockletNo()));
           tempHitBlockletsResult.add(blocklet);
-        } else {
+        } else if (LOGGER.isDebugEnabled()) {
           LOGGER.debug(String.format("BloomCoarseGrainDataMap: Skip scan -> 
blocklet#%s",
               String.valueOf(bloomFilter.getBlockletNo())));
         }

Reply via email to