HIVE-16150: LLAP: HiveInputFormat:getRecordReader: Fix log statements to reduce memory pressure (Prasanth Jayachandran reviewed by Rajesh Balamohan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bb9ea57b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bb9ea57b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bb9ea57b

Branch: refs/heads/hive-14535
Commit: bb9ea57b9ad07d5fafc4f91ce1dd1f8f9262c564
Parents: 348a592
Author: Prasanth Jayachandran <[email protected]>
Authored: Wed Mar 8 19:47:01 2017 -0800
Committer: Prasanth Jayachandran <[email protected]>
Committed: Wed Mar 8 19:47:01 2017 -0800

----------------------------------------------------------------------
 .../hadoop/hive/ql/io/HiveInputFormat.java      | 24 +++++++++++++++-----
 1 file changed, 18 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/bb9ea57b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index 1cb9557..4995bdf 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -234,7 +234,9 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
         for (String format : formats) {
           // TODO: should we check isAssignableFrom?
           if (ifName.equals(format)) {
-            LOG.info("Using SerDe-based LLAP reader for " + ifName);
+            if (LOG.isInfoEnabled()) {
+              LOG.info("Using SerDe-based LLAP reader for " + ifName);
+            }
             isSupported = isSerdeBased = true;
             break;
           }
@@ -242,8 +244,10 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
       }
     }
     if (!isSupported || !isVectorized) {
-      LOG.info("Not using llap for " + ifName + ": supported = "
+      if (LOG.isInfoEnabled()) {
+        LOG.info("Not using llap for " + ifName + ": supported = "
           + isSupported + ", vectorized = " + isVectorized);
+      }
       return inputFormat;
     }
     if (LOG.isDebugEnabled()) {
@@ -253,21 +257,27 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     @SuppressWarnings("unchecked")
     LlapIo<VectorizedRowBatch> llapIo = LlapProxy.getIo();
     if (llapIo == null) {
-      LOG.info("Not using LLAP IO because it is not initialized");
+      if (LOG.isInfoEnabled()) {
+        LOG.info("Not using LLAP IO because it is not initialized");
+      }
       return inputFormat;
     }
     Deserializer serde = null;
     if (isSerdeBased) {
       if (part == null) {
-        LOG.info("Not using LLAP IO because there's no partition spec for SerDe-based IF");
+        if (LOG.isInfoEnabled()) {
+          LOG.info("Not using LLAP IO because there's no partition spec for SerDe-based IF");
+        }
         return inputFormat;
       }
       VectorPartitionDesc vpart =  part.getVectorPartitionDesc();
       if (vpart != null) {
         VectorMapOperatorReadType old = vpart.getVectorMapOperatorReadType();
         if (old != VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT) {
-          LOG.info("Resetting VectorMapOperatorReadType from " + old + " for partition "
+          if (LOG.isInfoEnabled()) {
+            LOG.info("Resetting VectorMapOperatorReadType from " + old + " for partition "
              + part.getTableName() + " " + part.getPartSpec());
+          }
           vpart.setVectorMapOperatorReadType(
               VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT);
         }
@@ -344,7 +354,9 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     boolean nonNative = false;
    PartitionDesc part = HiveFileFormatUtils.getPartitionDescFromPathRecursively(
        pathToPartitionInfo, hsplit.getPath(), null);
-    LOG.debug("Found spec for " + hsplit.getPath() + " " + part + " from " + pathToPartitionInfo);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Found spec for " + hsplit.getPath() + " " + part + " from " + pathToPartitionInfo);
+    }
 
     if ((part != null) && (part.getTableDesc() != null)) {
       Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);

Reply via email to