http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java index d4514c6..519fcd7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java @@ -22,8 +22,6 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -36,6 +34,8 @@ import org.apache.hadoop.util.bloom.DynamicBloomFilter; import org.apache.hadoop.util.bloom.Filter; import org.apache.hadoop.util.bloom.Key; import org.apache.hadoop.util.hash.Hash; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_KEY; @@ -52,7 +52,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOO @InterfaceAudience.Public @InterfaceStability.Stable public class BloomMapFile { - private static final Log LOG = LogFactory.getLog(BloomMapFile.class); + private static final Logger LOG = LoggerFactory.getLogger(BloomMapFile.class); public static final String BLOOM_FILE_NAME = "bloom"; public static final int HASH_COUNT = 5;
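Every hunk in this patch follows the same basic pattern: the Commons Logging Log/LogFactory pair is replaced by the SLF4J Logger/LoggerFactory pair, leaving the field name LOG and its modifiers unchanged. A minimal before/after sketch of that shape, using a hypothetical class name for illustration only:

    // Before: Apache Commons Logging
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class ExampleService {   // hypothetical class, not part of the patch
      private static final Log LOG = LogFactory.getLog(ExampleService.class);
    }

    // After: SLF4J
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleService {
      private static final Logger LOG = LoggerFactory.getLogger(ExampleService.class);
    }

The swap is mechanical except where the two APIs differ; the later hunks show those spots (the IOUtils cleanup helpers and the missing FATAL level).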
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java index a3fea31..d5ab4d2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java @@ -22,11 +22,10 @@ import java.nio.ByteOrder; import java.security.AccessController; import java.security.PrivilegedAction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Unsafe; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.google.common.primitives.Longs; import com.google.common.primitives.UnsignedBytes; @@ -36,7 +35,7 @@ import com.google.common.primitives.UnsignedBytes; * class to be able to compare arrays that start at non-zero offsets. */ abstract class FastByteComparisons { - static final Log LOG = LogFactory.getLog(FastByteComparisons.class); + static final Logger LOG = LoggerFactory.getLogger(FastByteComparisons.class); /** * Lexicographically compare two byte arrays. http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java index b639edc..6142cd6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java @@ -32,13 +32,13 @@ import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.Shell; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -49,7 +49,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_ @InterfaceAudience.Public @InterfaceStability.Evolving public class IOUtils { - public static final Log LOG = LogFactory.getLog(IOUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(IOUtils.class); /** * Copies from one stream to another. 
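The hunks that follow (MapFile, SequenceFile, TFile, TFileDumper, NativeIO, Server) also switch IOUtils.cleanup(LOG, ...) calls to IOUtils.cleanupWithLogger(LOG, ...). The reason is the parameter type: the older helper accepts a Commons Logging Log, while cleanupWithLogger accepts an SLF4J Logger, so once a class's LOG field changes type it must call the newer overload. A simplified sketch of the two helpers' shape, assuming the usual close-quietly behavior; this is not the exact Hadoop implementation:

    // Older helper: takes a Commons Logging Log.
    public static void cleanup(org.apache.commons.logging.Log log,
                               java.io.Closeable... closeables) {
      for (java.io.Closeable c : closeables) {
        if (c == null) continue;
        try {
          c.close();
        } catch (Throwable e) {
          if (log != null && log.isDebugEnabled()) {
            log.debug("Exception in closing " + c, e);
          }
        }
      }
    }

    // Newer helper: same behavior, but takes an SLF4J Logger.
    public static void cleanupWithLogger(org.slf4j.Logger logger,
                                         java.io.Closeable... closeables) {
      for (java.io.Closeable c : closeables) {
        if (c == null) continue;
        try {
          c.close();
        } catch (Throwable e) {
          if (logger != null) {
            logger.debug("Exception in closing {}", c, e);
          }
        }
      }
    }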
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 5ba506a..fde1c86 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -37,6 +35,8 @@ import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.util.Options; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_KEY; @@ -60,7 +60,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SK @InterfaceAudience.Public @InterfaceStability.Stable public class MapFile { - private static final Log LOG = LogFactory.getLog(MapFile.class); + private static final Logger LOG = LoggerFactory.getLogger(MapFile.class); /** The name of the index file. */ public static final String INDEX_FILE_NAME = "index"; @@ -1002,7 +1002,7 @@ public class MapFile { while (reader.next(key, value)) // copy all entries writer.append(key, value); } finally { - IOUtils.cleanup(LOG, writer, reader); + IOUtils.cleanupWithLogger(LOG, writer, reader); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java index a8c0690..2e65f12 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java @@ -23,8 +23,6 @@ import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.nativeio.NativeIO; @@ -33,6 +31,8 @@ import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_WILLNEED; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Manages a pool of threads which can issue readahead requests on file descriptors. 
@@ -40,7 +40,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; @InterfaceAudience.Private @InterfaceStability.Evolving public class ReadaheadPool { - static final Log LOG = LogFactory.getLog(ReadaheadPool.class); + static final Logger LOG = LoggerFactory.getLogger(ReadaheadPool.class); private static final int POOL_SIZE = 4; private static final int MAX_POOL_SIZE = 16; private static final int CAPACITY = 1024; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 16ee874..253acea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -24,7 +24,6 @@ import java.util.*; import java.rmi.server.UID; import java.security.MessageDigest; -import org.apache.commons.logging.*; import org.apache.hadoop.util.Options; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.Options.CreateOpts; @@ -50,6 +49,8 @@ import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.MergeSort; import org.apache.hadoop.util.PriorityQueue; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -202,7 +203,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_SKIP_CHECKSU @InterfaceAudience.Public @InterfaceStability.Stable public class SequenceFile { - private static final Log LOG = LogFactory.getLog(SequenceFile.class); + private static final Logger LOG = LoggerFactory.getLogger(SequenceFile.class); private SequenceFile() {} // no public ctor @@ -1893,7 +1894,7 @@ public class SequenceFile { succeeded = true; } finally { if (!succeeded) { - IOUtils.cleanup(LOG, this.in); + IOUtils.cleanupWithLogger(LOG, this.in); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java index 89f1e42..f5d33a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java @@ -25,9 +25,10 @@ import java.io.UTFDataFormatException; import org.apache.hadoop.util.StringUtils; -import org.apache.commons.logging.*; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** A WritableComparable for strings that uses the UTF8 encoding. 
* @@ -42,7 +43,7 @@ import org.apache.hadoop.classification.InterfaceStability; @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Stable public class UTF8 implements WritableComparable<UTF8> { - private static final Log LOG= LogFactory.getLog(UTF8.class); + private static final Logger LOG= LoggerFactory.getLogger(UTF8.class); private static final DataInputBuffer IBUF = new DataInputBuffer(); private static final ThreadLocal<DataOutputBuffer> OBUF_FACTORY = http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java index 01bffa7..f103aad 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java @@ -23,8 +23,6 @@ import java.util.Set; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -33,6 +31,8 @@ import org.apache.hadoop.util.ReflectionUtils; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A global compressor/decompressor pool used to save and reuse @@ -41,7 +41,7 @@ import com.google.common.cache.LoadingCache; @InterfaceAudience.Public @InterfaceStability.Evolving public class CodecPool { - private static final Log LOG = LogFactory.getLog(CodecPool.class); + private static final Logger LOG = LoggerFactory.getLogger(CodecPool.class); /** * A global compressor pool used to save the expensive http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index 8fff75d..3701f20 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -19,8 +19,6 @@ package org.apache.hadoop.io.compress; import java.util.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -28,6 +26,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A factory that will find the correct codec for 
a given filename. @@ -36,8 +36,8 @@ import org.apache.hadoop.util.StringUtils; @InterfaceStability.Evolving public class CompressionCodecFactory { - public static final Log LOG = - LogFactory.getLog(CompressionCodecFactory.class.getName()); + public static final Logger LOG = + LoggerFactory.getLogger(CompressionCodecFactory.class.getName()); private static final ServiceLoader<CompressionCodec> CODEC_PROVIDERS = ServiceLoader.load(CompressionCodec.class); http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java index 31196cc..33f39ef 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java @@ -22,14 +22,14 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.zlib.ZlibDecompressor; import org.apache.hadoop.io.compress.zlib.ZlibFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -37,7 +37,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_ @InterfaceAudience.Public @InterfaceStability.Evolving public class DefaultCodec implements Configurable, CompressionCodec, DirectDecompressionCodec { - private static final Log LOG = LogFactory.getLog(DefaultCodec.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultCodec.class); Configuration conf; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java index a973dc9..d4a9787 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java @@ -24,9 +24,8 @@ import java.nio.ByteBuffer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the popular @@ -42,7 +41,8 @@ public class Bzip2Compressor implements Compressor { static final int DEFAULT_BLOCK_SIZE = 9; static final int DEFAULT_WORK_FACTOR = 30; - private static final 
Log LOG = LogFactory.getLog(Bzip2Compressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(Bzip2Compressor.class); private long stream; private int blockSize; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java index 3135165..96693ad 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java @@ -23,9 +23,8 @@ import java.nio.Buffer; import java.nio.ByteBuffer; import org.apache.hadoop.io.compress.Decompressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the popular @@ -36,7 +35,8 @@ import org.apache.commons.logging.LogFactory; public class Bzip2Decompressor implements Decompressor { private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024; - private static final Log LOG = LogFactory.getLog(Bzip2Decompressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(Bzip2Decompressor.class); private long stream; private boolean conserveMemory; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java index 0bbcc36..7ddae77 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java @@ -18,8 +18,6 @@ package org.apache.hadoop.io.compress.bzip2; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.util.NativeCodeLoader; @@ -30,6 +28,8 @@ import org.apache.hadoop.io.compress.bzip2.Bzip2Compressor; import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor; import org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor; import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of factories to create the right @@ -37,7 +37,7 @@ import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor; * */ public class Bzip2Factory { - private static final Log LOG = LogFactory.getLog(Bzip2Factory.class); + private static final Logger LOG = LoggerFactory.getLogger(Bzip2Factory.class); private static String bzip2LibraryName = ""; private static boolean nativeBzip2Loaded; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java 
---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java index ccfae8b..3792c36 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java @@ -22,19 +22,19 @@ import java.io.IOException; import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the lz4 compression algorithm. * http://code.google.com/p/lz4/ */ public class Lz4Compressor implements Compressor { - private static final Log LOG = - LogFactory.getLog(Lz4Compressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(Lz4Compressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java index 685956c..f26ae84 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java @@ -22,18 +22,18 @@ import java.io.IOException; import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the lz4 compression algorithm. 
* http://code.google.com/p/lz4/ */ public class Lz4Decompressor implements Decompressor { - private static final Log LOG = - LogFactory.getLog(Lz4Compressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(Lz4Compressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java index 814718d..3d38680 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java @@ -22,19 +22,19 @@ import java.io.IOException; import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the snappy compression algorithm. * http://code.google.com/p/snappy/ */ public class SnappyCompressor implements Compressor { - private static final Log LOG = - LogFactory.getLog(SnappyCompressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SnappyCompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java index 8712431..f31b76c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java @@ -22,19 +22,19 @@ import java.io.IOException; import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DirectDecompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the snappy compression algorithm. 
* http://code.google.com/p/snappy/ */ public class SnappyDecompressor implements Decompressor { - private static final Log LOG = - LogFactory.getLog(SnappyDecompressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SnappyDecompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java index 509456e..739788f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java @@ -23,9 +23,8 @@ import java.util.zip.Deflater; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A wrapper around java.util.zip.Deflater to make it conform @@ -34,7 +33,8 @@ import org.apache.commons.logging.LogFactory; */ public class BuiltInZlibDeflater extends Deflater implements Compressor { - private static final Log LOG = LogFactory.getLog(BuiltInZlibDeflater.class); + private static final Logger LOG = + LoggerFactory.getLogger(BuiltInZlibDeflater.class); public BuiltInZlibDeflater(int level, boolean nowrap) { super(level, nowrap); http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java index 24d98a5..d7b153b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java @@ -25,9 +25,8 @@ import java.nio.ByteBuffer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the popular @@ -37,7 +36,8 @@ import org.apache.commons.logging.LogFactory; */ public class ZlibCompressor implements Compressor { - private static final Log LOG = LogFactory.getLog(ZlibCompressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(ZlibCompressor.class); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java ---------------------------------------------------------------------- diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java index 4112d27..a2bad42 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java @@ -18,8 +18,6 @@ package org.apache.hadoop.io.compress.zlib; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; @@ -28,6 +26,8 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel; import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of factories to create the right @@ -35,8 +35,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys; * */ public class ZlibFactory { - private static final Log LOG = - LogFactory.getLog(ZlibFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(ZlibFactory.class); private static boolean nativeZlibLoaded = false; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java index ce93266..43d8299 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java @@ -30,8 +30,6 @@ import java.util.Arrays; import java.util.Map; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -43,6 +41,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.ScalarComparator; import org.apache.hadoop.io.file.tfile.CompareUtils.ScalarLong; import org.apache.hadoop.io.file.tfile.Compression.Algorithm; import org.apache.hadoop.io.file.tfile.Utils.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Block Compressed file, the underlying physical storage layer for TFile. @@ -54,7 +54,7 @@ final class BCFile { // the current version of BCFile impl, increment them (major or minor) made // enough changes static final Version API_VERSION = new Version((short) 1, (short) 0); - static final Log LOG = LogFactory.getLog(BCFile.class); + static final Logger LOG = LoggerFactory.getLogger(BCFile.class); /** * Prevent the instantiation of BCFile objects. 
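Several classes in this patch (CompressionCodecFactory, Lz4Compressor, SnappyCompressor, SerializationFactory) keep the older ClassName.class.getName() argument when constructing the logger. SLF4J accepts either a Class or a String name, and getLogger(Class) effectively resolves to the class's fully qualified name, so both forms yield the same logger; a small illustrative sketch (not part of the patch):

    import org.apache.hadoop.io.compress.CompressionCodecFactory;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggerNameCheck {   // illustrative only
      // Both calls resolve to the logger named
      // "org.apache.hadoop.io.compress.CompressionCodecFactory",
      // so retaining the getName() form changes nothing about which logger is used.
      static final Logger BY_CLASS = LoggerFactory.getLogger(CompressionCodecFactory.class);
      static final Logger BY_NAME  = LoggerFactory.getLogger(CompressionCodecFactory.class.getName());
    }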
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java index f7ec7ac..2298dc0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java @@ -24,8 +24,6 @@ import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; @@ -35,6 +33,8 @@ import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY; @@ -44,7 +44,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_ * Compression related stuff. */ final class Compression { - static final Log LOG = LogFactory.getLog(Compression.class); + static final Logger LOG = LoggerFactory.getLogger(Compression.class); /** * Prevent the instantiation of class. http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index 56739c6..c63baa5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -29,8 +29,6 @@ import java.io.OutputStream; import java.util.ArrayList; import java.util.Comparator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -51,6 +49,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.BytesComparator; import org.apache.hadoop.io.file.tfile.CompareUtils.MemcmpRawComparator; import org.apache.hadoop.io.file.tfile.Utils.Version; import org.apache.hadoop.io.serializer.JavaSerializationComparator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A TFile is a container of key-value pairs. 
Both keys and values are type-less @@ -131,7 +131,7 @@ import org.apache.hadoop.io.serializer.JavaSerializationComparator; @InterfaceAudience.Public @InterfaceStability.Evolving public class TFile { - static final Log LOG = LogFactory.getLog(TFile.class); + static final Logger LOG = LoggerFactory.getLogger(TFile.class); private static final String CHUNK_BUF_SIZE_ATTR = "tfile.io.chunk.size"; private static final String FS_INPUT_BUF_SIZE_ATTR = @@ -335,7 +335,7 @@ public class TFile { writerBCF.close(); } } finally { - IOUtils.cleanup(LOG, blkAppender, writerBCF); + IOUtils.cleanupWithLogger(LOG, blkAppender, writerBCF); blkAppender = null; writerBCF = null; state = State.CLOSED; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java index 84b92ec..3ef6b27 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java @@ -25,8 +25,6 @@ import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -36,12 +34,14 @@ import org.apache.hadoop.io.file.tfile.BCFile.BlockRegion; import org.apache.hadoop.io.file.tfile.BCFile.MetaIndexEntry; import org.apache.hadoop.io.file.tfile.TFile.TFileIndexEntry; import org.apache.hadoop.io.file.tfile.Utils.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Dumping the information of a TFile. */ class TFileDumper { - static final Log LOG = LogFactory.getLog(TFileDumper.class); + static final Logger LOG = LoggerFactory.getLogger(TFileDumper.class); private TFileDumper() { // namespace object not constructable. 
@@ -290,7 +290,7 @@ class TFileDumper { } } } finally { - IOUtils.cleanup(LOG, reader, fsdis); + IOUtils.cleanupWithLogger(LOG, reader, fsdis); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java index 7e9283e..d95efb6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java @@ -40,9 +40,9 @@ import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.PerformanceAdvisory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Unsafe; import com.google.common.annotations.VisibleForTesting; @@ -98,7 +98,7 @@ public class NativeIO { write. */ public static int SYNC_FILE_RANGE_WAIT_AFTER = 4; - private static final Log LOG = LogFactory.getLog(NativeIO.class); + private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class); // Set to true via JNI if possible public static boolean fadvisePossible = false; @@ -634,7 +634,7 @@ public class NativeIO { } } - private static final Log LOG = LogFactory.getLog(NativeIO.class); + private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class); private static boolean nativeLoaded = false; @@ -940,10 +940,10 @@ public class NativeIO { position += transferred; } } finally { - IOUtils.cleanup(LOG, output); - IOUtils.cleanup(LOG, fos); - IOUtils.cleanup(LOG, input); - IOUtils.cleanup(LOG, fis); + IOUtils.cleanupWithLogger(LOG, output); + IOUtils.cleanupWithLogger(LOG, fos); + IOUtils.cleanupWithLogger(LOG, input); + IOUtils.cleanupWithLogger(LOG, fis); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java index 306244a..4126344 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java @@ -22,10 +22,10 @@ import java.io.IOException; import java.io.FileDescriptor; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A factory for creating shared file descriptors inside a given directory. 
@@ -45,7 +45,8 @@ import org.apache.hadoop.classification.InterfaceStability; @InterfaceAudience.Private @InterfaceStability.Unstable public class SharedFileDescriptorFactory { - public static final Log LOG = LogFactory.getLog(SharedFileDescriptorFactory.class); + public static final Logger LOG = + LoggerFactory.getLogger(SharedFileDescriptorFactory.class); private final String prefix; private final String path; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java index d6f3e04..fa0cb6e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java @@ -32,8 +32,6 @@ import java.util.Map.Entry; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.ipc.StandbyException; @@ -41,6 +39,8 @@ import org.apache.hadoop.net.ConnectTimeoutException; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * <p> @@ -49,7 +49,7 @@ import com.google.common.annotations.VisibleForTesting; */ public class RetryPolicies { - public static final Log LOG = LogFactory.getLog(RetryPolicies.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryPolicies.class); /** * <p> http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java index 15a9b54..1f5acfe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java @@ -19,17 +19,17 @@ package org.apache.hadoop.io.retry; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.retry.RetryPolicies.MultipleLinearRandomRetry; import org.apache.hadoop.ipc.RemoteException; import com.google.protobuf.ServiceException; import org.apache.hadoop.ipc.RetriableException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class RetryUtils { - public static final Log LOG = LogFactory.getLog(RetryUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryUtils.class); /** * Return the default retry policy set in conf. 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java index aa3c86a..a9787a0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java @@ -21,8 +21,6 @@ package org.apache.hadoop.io.serializer; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -31,6 +29,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization; import org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * <p> @@ -41,8 +41,8 @@ import org.apache.hadoop.util.ReflectionUtils; @InterfaceStability.Evolving public class SerializationFactory extends Configured { - private static final Log LOG = - LogFactory.getLog(SerializationFactory.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SerializationFactory.class.getName()); private List<Serialization<?>> serializations = new ArrayList<Serialization<?>>(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java index 2764788..d1bd180 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java @@ -28,20 +28,21 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstracts queue operations for different blocking queues. */ public class CallQueueManager<E extends Schedulable> extends AbstractQueue<E> implements BlockingQueue<E> { - public static final Log LOG = LogFactory.getLog(CallQueueManager.class); + public static final Logger LOG = + LoggerFactory.getLogger(CallQueueManager.class); // Number of checkpoints for empty queue. private static final int CHECKPOINT_NUM = 20; // Interval to check empty queue. 
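The patch only changes the logger declarations; existing call sites such as the concatenated debug message in the Server.java hunk further below are left as-is. The SLF4J API does, however, support {} placeholders, which avoid building the message string when the level is disabled. A small illustrative sketch, assuming an SLF4J LOG field as declared in the hunks above:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ParameterizedLoggingExample {   // illustrative only, not part of the patch
      private static final Logger LOG =
          LoggerFactory.getLogger(ParameterizedLoggingExample.class);

      void exit() {
        // Concatenation builds the message string even when DEBUG is disabled:
        LOG.debug(Thread.currentThread().getName() + ": exiting");
        // Placeholders defer formatting until the level is known to be enabled:
        LOG.debug("{}: exiting", Thread.currentThread().getName());
      }
    }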
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java index 4164c7d..c225d99 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java @@ -21,8 +21,6 @@ package org.apache.hadoop.ipc; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability; @@ -57,6 +55,8 @@ import org.apache.hadoop.util.Time; import org.apache.hadoop.util.concurrent.AsyncGet; import org.apache.htrace.core.Span; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.SocketFactory; import javax.security.sasl.Sasl; @@ -84,7 +84,7 @@ import static org.apache.hadoop.ipc.RpcConstants.PING_CALL_ID; @InterfaceStability.Evolving public class Client implements AutoCloseable { - public static final Log LOG = LogFactory.getLog(Client.class); + public static final Logger LOG = LoggerFactory.getLogger(Client.class); /** A counter for generating call IDs. */ private static final AtomicInteger callIdCounter = new AtomicInteger(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java index 8bcaf05..20161b8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java @@ -33,11 +33,11 @@ import java.util.concurrent.atomic.AtomicLong; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException; import org.apache.hadoop.metrics2.util.MBeans; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A queue with multiple levels for each priority. 
@@ -50,7 +50,7 @@ public class FairCallQueue<E extends Schedulable> extends AbstractQueue<E> public static final String IPC_CALLQUEUE_PRIORITY_LEVELS_KEY = "faircallqueue.priority-levels"; - public static final Log LOG = LogFactory.getLog(FairCallQueue.class); + public static final Logger LOG = LoggerFactory.getLogger(FairCallQueue.class); /* The queues */ private final ArrayList<BlockingQueue<E>> queues; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index 3c0aaba..190c550 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -21,8 +21,6 @@ package org.apache.hadoop.ipc; import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.*; import com.google.protobuf.Descriptors.MethodDescriptor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -39,6 +37,8 @@ import org.apache.hadoop.util.Time; import org.apache.hadoop.util.concurrent.AsyncGet; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.SocketFactory; import java.io.IOException; @@ -55,7 +55,8 @@ import java.util.concurrent.atomic.AtomicBoolean; */ @InterfaceStability.Evolving public class ProtobufRpcEngine implements RpcEngine { - public static final Log LOG = LogFactory.getLog(ProtobufRpcEngine.class); + public static final Logger LOG = + LoggerFactory.getLogger(ProtobufRpcEngine.class); private static final ThreadLocal<AsyncGet<Message, Exception>> ASYNC_RETURN_MESSAGE = new ThreadLocal<>(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java index 3f68d63..6d96eab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java @@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import javax.net.SocketFactory; -import org.apache.commons.logging.*; - import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.*; @@ -59,6 +57,8 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Time; import com.google.protobuf.BlockingService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** A simple RPC mechanism. 
* @@ -109,7 +109,7 @@ public class RPC { Writable rpcRequest, long receiveTime) throws Exception ; } - static final Log LOG = LogFactory.getLog(RPC.class); + static final Logger LOG = LoggerFactory.getLogger(RPC.class); /** * Get all superInterfaces that extend VersionedProtocol http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java index ee84a04..e67e8d9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java @@ -24,9 +24,9 @@ import com.google.common.base.Joiner; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Used to registry custom methods to refresh at runtime. @@ -34,7 +34,8 @@ import org.apache.hadoop.classification.InterfaceStability; */ @InterfaceStability.Unstable public class RefreshRegistry { - public static final Log LOG = LogFactory.getLog(RefreshRegistry.class); + public static final Logger LOG = + LoggerFactory.getLogger(RefreshRegistry.class); // Used to hold singleton instance private static class RegistryHolder { http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java index 7b85286..6f6ceb5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java @@ -22,8 +22,6 @@ import java.util.Arrays; import java.util.UUID; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.ipc.metrics.RetryCacheMetrics; import org.apache.hadoop.util.LightWeightCache; @@ -32,6 +30,8 @@ import org.apache.hadoop.util.LightWeightGSet.LinkedElement; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Maintains a cache of non-idempotent requests that have been successfully @@ -44,7 +44,7 @@ import com.google.common.base.Preconditions; */ @InterfaceAudience.Private public class RetryCache { - public static final Log LOG = LogFactory.getLog(RetryCache.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryCache.class); private final RetryCacheMetrics retryCacheMetrics; private static final int MAX_CAPACITY = 16; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java 
---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index 85b8bfd..a2237bf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -69,8 +69,6 @@ import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -124,6 +122,8 @@ import com.google.protobuf.ByteString; import com.google.protobuf.CodedOutputStream; import com.google.protobuf.Message; import org.codehaus.jackson.map.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An abstract IPC service. IPC calls take a single {@link Writable} as a * parameter, and return a {@link Writable} as their value. A service runs on @@ -292,9 +292,9 @@ public abstract class Server { } - public static final Log LOG = LogFactory.getLog(Server.class); - public static final Log AUDITLOG = - LogFactory.getLog("SecurityLogger."+Server.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(Server.class); + public static final Logger AUDITLOG = + LoggerFactory.getLogger("SecurityLogger."+Server.class.getName()); private static final String AUTH_FAILED_FOR = "Auth failed for "; private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for "; @@ -1112,7 +1112,7 @@ public abstract class Server { } catch (IOException ex) { LOG.error("Error in Reader", ex); } catch (Throwable re) { - LOG.fatal("Bug in read selector!", re); + LOG.error("Bug in read selector!", re); ExitUtil.terminate(1, "Bug in read selector!"); } } @@ -2620,7 +2620,7 @@ public abstract class Server { } } finally { CurCall.set(null); - IOUtils.cleanup(LOG, traceScope); + IOUtils.cleanupWithLogger(LOG, traceScope); } } LOG.debug(Thread.currentThread().getName() + ": exiting"); @@ -2629,7 +2629,7 @@ public abstract class Server { } @VisibleForTesting - void logException(Log logger, Throwable e, Call call) { + void logException(Logger logger, Throwable e, Call call) { if (exceptionsHandler.isSuppressedLog(e.getClass())) { return; // Log nothing. 
} http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java index cfda947..d308725 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java @@ -20,9 +20,9 @@ package org.apache.hadoop.ipc; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Determines which queue to start reading from, occasionally drawing from @@ -43,8 +43,8 @@ public class WeightedRoundRobinMultiplexer implements RpcMultiplexer { public static final String IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY = "faircallqueue.multiplexer.weights"; - public static final Log LOG = - LogFactory.getLog(WeightedRoundRobinMultiplexer.class); + public static final Logger LOG = + LoggerFactory.getLogger(WeightedRoundRobinMultiplexer.class); private final int numQueues; // The number of queues under our provisioning http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java index df45d62..d122b0d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java @@ -28,8 +28,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import javax.net.SocketFactory; -import org.apache.commons.logging.*; - import org.apache.hadoop.io.*; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.ipc.Client.ConnectionId; @@ -43,11 +41,13 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.*; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An RpcEngine implementation for Writable data. */ @InterfaceStability.Evolving public class WritableRpcEngine implements RpcEngine { - private static final Log LOG = LogFactory.getLog(RPC.class); + private static final Logger LOG = LoggerFactory.getLogger(RPC.class); //writableRpcVersion should be updated if there is a change //in format of the rpc messages. 
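The hunks above make the same mechanical swap throughout: the commons-logging Log/LogFactory pair is replaced by SLF4J's Logger/LoggerFactory, and because SLF4J defines no FATAL level, the Server.java read-selector handler now logs at ERROR before calling ExitUtil.terminate. A minimal before/after sketch of that pattern, standalone rather than taken from Hadoop (the class and method names below are made up for illustration):

    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ReadLoopSketch {
      // before: private static final Log LOG = LogFactory.getLog(ReadLoopSketch.class);
      private static final Logger LOG =
          LoggerFactory.getLogger(ReadLoopSketch.class);

      void run() {
        try {
          doSelect();
        } catch (IOException ex) {
          LOG.error("Error in Reader", ex);   // call site unchanged; both APIs accept (message, Throwable)
        } catch (Throwable re) {
          // before: LOG.fatal("Bug in read selector!", re); -- SLF4J has no FATAL level
          LOG.error("Bug in read selector!", re);
        }
      }

      private void doSelect() throws IOException { /* no-op in this sketch */ }
    }

The other change in Server.java, IOUtils.cleanup(LOG, ...) becoming IOUtils.cleanupWithLogger(LOG, ...), follows from the same swap: LOG is now an org.slf4j.Logger, which the old cleanup(Log, ...) overload cannot accept.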
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java index a853d64..fc09e0a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.ipc.RetryCache; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.annotation.Metric; @@ -26,6 +24,8 @@ import org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsRegistry; import org.apache.hadoop.metrics2.lib.MutableCounterLong; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining the various RetryCache-related statistics @@ -35,7 +35,7 @@ import org.apache.hadoop.metrics2.lib.MutableCounterLong; @Metrics(about="Aggregate RetryCache metrics", context="rpc") public class RetryCacheMetrics { - static final Log LOG = LogFactory.getLog(RetryCacheMetrics.class); + static final Logger LOG = LoggerFactory.getLogger(RetryCacheMetrics.class); final MetricsRegistry registry; final String name; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java index 8b7e995..6ed57ec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsRegistry; import org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining RPC method related statistics @@ -37,7 +37,7 @@ public class RpcDetailedMetrics { @Metric MutableRatesWithAggregation rates; @Metric MutableRatesWithAggregation deferredRpcRates; - static final Log LOG = LogFactory.getLog(RpcDetailedMetrics.class); + static final Logger LOG = LoggerFactory.getLogger(RpcDetailedMetrics.class); final MetricsRegistry registry; final String name; @@ -45,7 +45,7 @@ public class RpcDetailedMetrics { name = "RpcDetailedActivityForPort"+ port; registry = new 
MetricsRegistry("rpcdetailed") .tag("port", "RPC port", String.valueOf(port)); - LOG.debug(registry.info()); + LOG.debug(registry.info().toString()); } public String name() { return name; } http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java index 8ce1379..d53d7d3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.classification.InterfaceAudience; @@ -31,6 +29,8 @@ import org.apache.hadoop.metrics2.lib.MutableCounterInt; import org.apache.hadoop.metrics2.lib.MutableCounterLong; import org.apache.hadoop.metrics2.lib.MutableQuantiles; import org.apache.hadoop.metrics2.lib.MutableRate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining the various RPC statistics @@ -40,7 +40,7 @@ import org.apache.hadoop.metrics2.lib.MutableRate; @Metrics(about="Aggregate RPC metrics", context="rpc") public class RpcMetrics { - static final Log LOG = LogFactory.getLog(RpcMetrics.class); + static final Logger LOG = LoggerFactory.getLogger(RpcMetrics.class); final Server server; final MetricsRegistry registry; final String name; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index 6546c05..c8b67bd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -17,11 +17,11 @@ package org.apache.hadoop.jmx; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.http.HttpServer2; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; @@ -116,7 +116,8 @@ import java.util.Set; * */ public class JMXJsonServlet extends HttpServlet { - private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class); + private static final Logger LOG = + LoggerFactory.getLogger(JMXJsonServlet.class); static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods"; static final String ACCESS_CONTROL_ALLOW_ORIGIN = http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java 
---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java index 14a3e33..3759df7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java @@ -20,10 +20,10 @@ package org.apache.hadoop.metrics; import java.net.InetAddress; import java.net.UnknownHostException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility class to simplify creation and reporting of hadoop metrics. @@ -39,8 +39,7 @@ import org.apache.hadoop.classification.InterfaceStability; @InterfaceStability.Evolving public class MetricsUtil { - public static final Log LOG = - LogFactory.getLog(MetricsUtil.class); + public static final Logger LOG = LoggerFactory.getLogger(MetricsUtil.class); /** * Don't allow creation of a new instance of Metrics http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java index 67414c7..c0a278f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java @@ -25,8 +25,6 @@ import java.util.List; import java.util.Map; import org.apache.commons.io.Charsets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -34,6 +32,8 @@ import org.apache.hadoop.metrics.ContextFactory; import org.apache.hadoop.metrics.spi.AbstractMetricsContext; import org.apache.hadoop.metrics.spi.OutputRecord; import org.apache.hadoop.metrics.spi.Util; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Context for sending metrics to Ganglia. 
@@ -63,7 +63,7 @@ public class GangliaContext extends AbstractMetricsContext { private static final int BUFFER_SIZE = 1500; // as per libgmond.c private static final int DEFAULT_MULTICAST_TTL = 1; - private final Log LOG = LogFactory.getLog(this.getClass()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass()); private static final Map<Class,String> typeTable = new HashMap<Class,String>(5); @@ -126,7 +126,7 @@ public class GangliaContext extends AbstractMetricsContext { datagramSocket = new DatagramSocket(); } } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString()); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java index 0cfd31d..6e803a3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java @@ -23,11 +23,11 @@ import java.net.DatagramPacket; import java.net.SocketAddress; import java.net.UnknownHostException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.metrics.ContextFactory; import org.apache.hadoop.net.DNS; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Context for sending metrics to Ganglia version 3.1.x. 
@@ -42,8 +42,8 @@ public class GangliaContext31 extends GangliaContext { String hostName = "UNKNOWN.example.com"; - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.util.GangliaContext31"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.util.GangliaContext31"); public void init(String contextName, ContextFactory factory) { super.init(contextName, factory); @@ -62,7 +62,7 @@ public class GangliaContext31 extends GangliaContext { conf.get("dfs.datanode.dns.interface","default"), conf.get("dfs.datanode.dns.nameserver","default")); } catch (UnknownHostException uhe) { - LOG.error(uhe); + LOG.error(uhe.toString()); hostName = "UNKNOWN.example.com"; } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java index ed9d3c9..1b1b1ea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java @@ -29,12 +29,12 @@ import org.apache.hadoop.metrics.MetricsContext; import org.apache.hadoop.metrics.MetricsRecord; import org.apache.hadoop.metrics.MetricsUtil; import org.apache.hadoop.metrics.Updater; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static java.lang.Thread.State.*; import java.lang.management.GarbageCollectorMXBean; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; /** * Singleton class which reports Java Virtual Machine metrics to the metrics API. 
@@ -50,7 +50,7 @@ public class JvmMetrics implements Updater { private static final float M = 1024*1024; private static JvmMetrics theInstance = null; - private static Log log = LogFactory.getLog(JvmMetrics.class); + private static Logger log = LoggerFactory.getLogger(JvmMetrics.class); private MetricsRecord metrics; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java index f9d3442..b04952e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java @@ -23,9 +23,6 @@ import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; @@ -33,6 +30,8 @@ import org.apache.hadoop.metrics.MetricsContext; import org.apache.hadoop.metrics.MetricsRecord; import org.apache.hadoop.metrics.MetricsUtil; import org.apache.hadoop.metrics.Updater; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @deprecated Use org.apache.hadoop.metrics2 package instead. @@ -42,7 +41,8 @@ import org.apache.hadoop.metrics.Updater; @InterfaceStability.Evolving public class CompositeContext extends AbstractMetricsContext { - private static final Log LOG = LogFactory.getLog(CompositeContext.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompositeContext.class); private static final String ARITY_LABEL = "arity"; private static final String SUB_FMT = "%s.sub%d"; private final ArrayList<MetricsContext> subctxt = http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java index 2199346..435ffff 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java @@ -19,8 +19,8 @@ package org.apache.hadoop.metrics.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The MetricsIntValue class is for a metric that is not time varied @@ -34,8 +34,8 @@ import org.apache.commons.logging.LogFactory; @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public class MetricsIntValue extends MetricsBase { - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.metrics.util"); + private static final Logger LOG 
= + LoggerFactory.getLogger("org.apache.hadoop.metrics.util"); private int value; private boolean changed; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java index 30a5f61..9661245 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java @@ -19,8 +19,8 @@ package org.apache.hadoop.metrics.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The MetricsTimeVaryingInt class is for a metric that naturally @@ -37,8 +37,8 @@ import org.apache.commons.logging.LogFactory; @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public class MetricsTimeVaryingInt extends MetricsBase { - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.metrics.util"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.metrics.util"); private int currentValue; private int previousIntervalValue; http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java index ad2fdf6..666a1a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java @@ -20,8 +20,8 @@ package org.apache.hadoop.metrics.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The MetricsTimeVaryingLong class is for a metric that naturally @@ -38,8 +38,8 @@ import org.apache.commons.logging.LogFactory; @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public class MetricsTimeVaryingLong extends MetricsBase{ - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.metrics.util"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.metrics.util"); private long currentValue; private long previousIntervalValue;
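A second recurring adjustment shows up in GangliaContext, GangliaContext31 and RpcDetailedMetrics: commons-logging accepts an Object message, so calls such as LOG.error(e) or LOG.debug(registry.info()) compiled as-is, whereas the SLF4J methods take a String (optionally followed by a Throwable), hence the explicit toString() calls added by this patch. A small standalone sketch of the same conversion, with hypothetical class, method and message names:

    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class MessageConversionSketch {
      // Named logger, mirroring the "org.apache.hadoop.metrics.util" loggers above.
      private static final Logger LOG =
          LoggerFactory.getLogger("org.apache.hadoop.metrics.util");

      void reportSocketFailure(IOException e) {
        // before (commons-logging): LOG.error(e);  -- error(Object) accepted the exception itself
        LOG.error(e.toString());                    // SLF4J: the message must be a String
        // LOG.error("socket setup failed", e);     // alternative that would also log the stack trace
      }

      void dumpRegistryInfo(Object info) {
        // before: LOG.debug(info);
        LOG.debug(info.toString());                 // matches the registry.info().toString() change
      }
    }

Converting with toString() keeps the original message-only output; passing the exception as a second argument would additionally record the stack trace, but that would change what gets logged, and the patch keeps the message-only form instead.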
