Author: suresh
Date: Wed Oct 16 21:00:07 2013
New Revision: 1532907

URL: http://svn.apache.org/r1532907
Log:
HADOOP-10005. No need to check INFO severity level is enabled or not. Contributed by Jackie Chang.
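For context, a minimal standalone sketch of the guarded-versus-unguarded logging pattern this change drops, assuming the commons-logging Log/LogFactory API used by these classes at this revision; the class name LogGuardExample and the sample value are illustrative only and not part of the patch.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class LogGuardExample {
  private static final Log LOG = LogFactory.getLog(LogGuardExample.class);

  public static void main(String[] args) {
    int softLimit = 83886080;  // illustrative value only

    // Guarded form (what the patch removes): the guard only skips the
    // string concatenation when INFO is disabled; Log.info() performs
    // its own level check internally.
    if (LOG.isInfoEnabled()) {
      LOG.info("soft limit at " + softLimit);
    }

    // Unguarded form (what the patch keeps): Hadoop's default log4j
    // configuration runs at INFO, so the guard saves nothing in practice.
    LOG.info("soft limit at " + softLimit);
  }
}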
Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java?rev=1532907&r1=1532906&r2=1532907&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java Wed Oct 16 21:00:07 2013
@@ -972,12 +972,10 @@ public class MapTask extends Task {
       maxRec = kvmeta.capacity() / NMETA;
       softLimit = (int)(kvbuffer.length * spillper);
       bufferRemaining = softLimit;
-      if (LOG.isInfoEnabled()) {
-        LOG.info(JobContext.IO_SORT_MB + ": " + sortmb);
-        LOG.info("soft limit at " + softLimit);
-        LOG.info("bufstart = " + bufstart + "; bufvoid = " + bufvoid);
-        LOG.info("kvstart = " + kvstart + "; length = " + maxRec);
-      }
+      LOG.info(JobContext.IO_SORT_MB + ": " + sortmb);
+      LOG.info("soft limit at " + softLimit);
+      LOG.info("bufstart = " + bufstart + "; bufvoid = " + bufvoid);
+      LOG.info("kvstart = " + kvstart + "; length = " + maxRec);

       // k/v serialization
       comparator = job.getOutputKeyComparator();
@@ -1180,10 +1178,8 @@ public class MapTask extends Task {
       final int aligned = pos - (pos % METASIZE);
       kvindex =
         ((aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
-      if (LOG.isInfoEnabled()) {
-        LOG.info("(EQUATOR) " + pos + " kvi " + kvindex +
-            "(" + (kvindex * 4) + ")");
-      }
+      LOG.info("(EQUATOR) " + pos + " kvi " + kvindex +
+          "(" + (kvindex * 4) + ")");
     }

     /**
@@ -1198,10 +1194,8 @@ public class MapTask extends Task {
       // set start/end to point to first meta record
       kvstart = kvend =
         ((aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
-      if (LOG.isInfoEnabled()) {
-        LOG.info("(RESET) equator " + e + " kv " + kvstart + "(" +
-          (kvstart * 4) + ")" + " kvi " + kvindex + "(" + (kvindex * 4) + ")");
-      }
+      LOG.info("(RESET) equator " + e + " kv " + kvstart + "(" +
+        (kvstart * 4) + ")" + " kvi " + kvindex + "(" + (kvindex * 4) + ")");
     }

     /**
@@ -1456,15 +1450,13 @@ public class MapTask extends Task {
         if (kvindex != kvend) {
           kvend = (kvindex + NMETA) % kvmeta.capacity();
           bufend = bufmark;
-          if (LOG.isInfoEnabled()) {
-            LOG.info("Spilling map output");
-            LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark +
-                     "; bufvoid = " + bufvoid);
-            LOG.info("kvstart = " + kvstart + "(" + (kvstart * 4) +
-                     "); kvend = " + kvend + "(" + (kvend * 4) +
-                     "); length = " + (distanceTo(kvend, kvstart,
-                           kvmeta.capacity()) + 1) + "/" + maxRec);
-          }
+          LOG.info("Spilling map output");
+          LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark +
+                   "; bufvoid = " + bufvoid);
+          LOG.info("kvstart = " + kvstart + "(" + (kvstart * 4) +
+                   "); kvend = " + kvend + "(" + (kvend * 4) +
+                   "); length = " + (distanceTo(kvend, kvstart,
+                         kvmeta.capacity()) + 1) + "/" + maxRec);
           sortAndSpill();
         }
       } catch (InterruptedException e) {
@@ -1547,15 +1539,13 @@ public class MapTask extends Task {
       kvend = (kvindex + NMETA) % kvmeta.capacity();
       bufend = bufmark;
       spillInProgress = true;
-      if (LOG.isInfoEnabled()) {
-        LOG.info("Spilling map output");
-        LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark +
-                 "; bufvoid = " + bufvoid);
-        LOG.info("kvstart = " + kvstart + "(" + (kvstart * 4) +
-                 "); kvend = " + kvend + "(" + (kvend * 4) +
-                 "); length = " + (distanceTo(kvend, kvstart,
-                       kvmeta.capacity()) + 1) + "/" + maxRec);
-      }
+      LOG.info("Spilling map output");
+      LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark +
+               "; bufvoid = " + bufvoid);
+      LOG.info("kvstart = " + kvstart + "(" + (kvstart * 4) +
+               "); kvend = " + kvend + "(" + (kvend * 4) +
+               "); length = " + (distanceTo(kvend, kvstart,
+                     kvmeta.capacity()) + 1) + "/" + maxRec);
       spillReady.signal();
     }

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java?rev=1532907&r1=1532906&r2=1532907&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java Wed Oct 16 21:00:07 2013
@@ -81,14 +81,12 @@ public class TestMultiFileInputFormat ex
   }

   public void testFormat() throws IOException {
-    if(LOG.isInfoEnabled()) {
-      LOG.info("Test started");
-      LOG.info("Max split count = " + MAX_SPLIT_COUNT);
-      LOG.info("Split count increment = " + SPLIT_COUNT_INCR);
-      LOG.info("Max bytes per file = " + MAX_BYTES);
-      LOG.info("Max number of files = " + MAX_NUM_FILES);
-      LOG.info("Number of files increment = " + NUM_FILES_INCR);
-    }
+    LOG.info("Test started");
+    LOG.info("Max split count = " + MAX_SPLIT_COUNT);
+    LOG.info("Split count increment = " + SPLIT_COUNT_INCR);
+    LOG.info("Max bytes per file = " + MAX_BYTES);
+    LOG.info("Max number of files = " + MAX_NUM_FILES);
+    LOG.info("Number of files increment = " + NUM_FILES_INCR);
     MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
     FileSystem fs = FileSystem.getLocal(job);