HDFS-13695. Move logging to slf4j in HDFS package. Contributed by Ian Pickering.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/eca1a4bf
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/eca1a4bf
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/eca1a4bf

Branch: refs/heads/trunk
Commit: eca1a4bfe952fc184fe90dde50bac9b0e5293568
Parents: b3161c4
Author: Giovanni Matteo Fumarola <[email protected]>
Authored: Thu Sep 6 14:48:00 2018 -0700
Committer: Giovanni Matteo Fumarola <[email protected]>
Committed: Thu Sep 6 14:48:00 2018 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hdfs/DFSClient.java  |  4 +--
 .../org/apache/hadoop/hdfs/DFSUtilClient.java   |  2 +-
 .../java/org/apache/hadoop/hdfs/PeerCache.java  |  8 ++---
 .../hdfs/client/impl/BlockReaderFactory.java    | 12 +++----
 .../client/impl/BlockReaderLocalLegacy.java     |  2 +-
 .../hdfs/shortcircuit/ShortCircuitCache.java    |  4 +--
 .../hdfs/shortcircuit/ShortCircuitReplica.java  |  2 +-
 .../apache/hadoop/hdfs/util/IOUtilsClient.java  |  3 +-
 .../hadoop/hdfs/nfs/nfs3/OpenFileCtx.java       |  4 +--
 .../hadoop/hdfs/nfs/nfs3/OpenFileCtxCache.java  |  6 ++--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java    |  7 +++--
 .../org/apache/hadoop/hdfs/HdfsDtFetcher.java   |  7 +++--
 .../org/apache/hadoop/hdfs/NameNodeProxies.java |  7 +++--
 .../apache/hadoop/hdfs/SWebHdfsDtFetcher.java   |  7 +++--
 .../apache/hadoop/hdfs/WebHdfsDtFetcher.java    |  7 +++--
 .../hadoop/hdfs/net/DomainPeerServer.java       |  6 ++--
 .../apache/hadoop/hdfs/net/TcpPeerServer.java   |  6 ++--
 .../hdfs/qjournal/client/AsyncLoggerSet.java    |  6 ++--
 .../qjournal/client/QuorumJournalManager.java   |  6 ++--
 .../qjournal/server/GetJournalEditServlet.java  |  7 +++--
 .../hadoop/hdfs/qjournal/server/Journal.java    | 12 +++----
 .../hdfs/qjournal/server/JournalNode.java       | 10 +++---
 .../qjournal/server/JournalNodeRpcServer.java   |  4 +--
 .../token/block/BlockTokenSecretManager.java    |  7 +++--
 .../DelegationTokenSecretManager.java           |  8 ++---
 .../hadoop/hdfs/server/balancer/Balancer.java   |  8 ++---
 .../hadoop/hdfs/server/balancer/Dispatcher.java |  6 ++--
 .../hdfs/server/balancer/NameNodeConnector.java |  7 +++--
 .../AvailableSpaceBlockPlacementPolicy.java     |  8 ++---
 .../server/blockmanagement/DatanodeManager.java |  6 ++--
 .../server/blockmanagement/HostFileManager.java |  7 +++--
 .../hadoop/hdfs/server/common/JspHelper.java    |  6 ++--
 .../hdfs/server/common/MetricsLoggerTask.java   |  6 ++--
 .../apache/hadoop/hdfs/server/common/Util.java  |  7 +++--
 .../hdfs/server/datanode/DirectoryScanner.java  |  7 +++--
 .../server/datanode/ProfilingFileIoEvents.java  |  7 +++--
 .../server/datanode/ShortCircuitRegistry.java   |  7 +++--
 .../AvailableSpaceVolumeChoosingPolicy.java     |  7 +++--
 .../RoundRobinVolumeChoosingPolicy.java         |  7 +++--
 .../datanode/fsdataset/impl/BlockPoolSlice.java |  8 ++---
 .../impl/FsDatasetAsyncDiskService.java         |  7 +++--
 .../impl/RamDiskAsyncLazyPersistService.java    |  7 +++--
 .../fsdataset/impl/RamDiskReplicaTracker.java   |  7 +++--
 .../server/datanode/web/DatanodeHttpServer.java |  6 ++--
 .../web/RestCsrfPreventionFilterHandler.java    |  4 +--
 .../datanode/web/SimpleHttpProxyHandler.java    |  4 +--
 .../web/webhdfs/DataNodeUGIProvider.java        |  6 ++--
 .../datanode/web/webhdfs/ExceptionHandler.java  |  4 +--
 .../server/datanode/web/webhdfs/HdfsWriter.java |  8 ++---
 .../datanode/web/webhdfs/WebHdfsHandler.java    | 10 +++---
 .../apache/hadoop/hdfs/server/mover/Mover.java  | 12 +++----
 .../hadoop/hdfs/server/namenode/CachePool.java  |  2 --
 .../hdfs/server/namenode/CheckpointConf.java    |  7 +++--
 .../hdfs/server/namenode/Checkpointer.java      |  8 ++---
 .../ContentSummaryComputationContext.java       |  8 ++---
 .../hadoop/hdfs/server/namenode/DfsServlet.java |  7 +++--
 .../namenode/EditLogBackupOutputStream.java     |  7 +++--
 .../server/namenode/EditLogFileInputStream.java |  8 ++---
 .../namenode/EditLogFileOutputStream.java       | 11 ++++---
 .../hdfs/server/namenode/EditsDoubleBuffer.java |  7 +++--
 .../hdfs/server/namenode/FSEditLogAsync.java    |  8 ++---
 .../hdfs/server/namenode/FSEditLogLoader.java   |  7 +++--
 .../hadoop/hdfs/server/namenode/FSImage.java    |  9 +++---
 .../hdfs/server/namenode/FSImageFormat.java     |  6 ++--
 .../server/namenode/FSImageFormatPBINode.java   |  7 +++--
 ...FSImagePreTransactionalStorageInspector.java | 10 +++---
 .../FSImageTransactionalStorageInspector.java   |  6 ++--
 .../hdfs/server/namenode/FSNamesystem.java      |  2 +-
 .../server/namenode/FSPermissionChecker.java    |  6 ++--
 .../server/namenode/FileJournalManager.java     |  7 +++--
 .../hadoop/hdfs/server/namenode/INode.java      |  6 ++--
 .../hdfs/server/namenode/INodesInPath.java      |  6 ++--
 .../hdfs/server/namenode/ImageServlet.java      |  6 ++--
 .../hadoop/hdfs/server/namenode/JournalSet.java |  8 ++---
 .../server/namenode/MetaRecoveryContext.java    |  7 +++--
 .../namenode/NNStorageRetentionManager.java     |  6 ++--
 .../hdfs/server/namenode/NNUpgradeUtil.java     |  7 +++--
 .../hadoop/hdfs/server/namenode/NameCache.java  |  6 ++--
 .../namenode/NameNodeResourceChecker.java       |  7 +++--
 .../hdfs/server/namenode/NamenodeFsck.java      |  9 +++---
 .../namenode/RedundantEditLogInputStream.java   |  8 ++---
 .../hdfs/server/namenode/SecondaryNameNode.java | 16 +++++-----
 .../server/namenode/StartupProgressServlet.java |  2 +-
 .../hdfs/server/namenode/TransferFsImage.java   |  7 +++--
 .../server/namenode/ha/BootstrapStandby.java    | 13 ++++----
 .../hdfs/server/namenode/ha/EditLogTailer.java  |  8 ++---
 .../web/resources/NamenodeWebHdfsMethods.java   |  7 +++--
 .../org/apache/hadoop/hdfs/tools/DFSAdmin.java  |  6 ++--
 .../apache/hadoop/hdfs/tools/DFSHAAdmin.java    |  6 ++--
 .../hdfs/tools/DFSZKFailoverController.java     | 12 +++----
 .../hdfs/tools/DelegationTokenFetcher.java      |  8 ++---
 .../org/apache/hadoop/hdfs/tools/GetGroups.java |  6 ++--
 .../OfflineEditsBinaryLoader.java               | 10 +++---
 .../offlineImageViewer/FSImageHandler.java      |  7 +++--
 .../tools/offlineImageViewer/FSImageLoader.java |  7 +++--
 .../OfflineImageReconstructor.java              | 10 +++---
 .../offlineImageViewer/OfflineImageViewer.java  |  9 +++---
 .../OfflineImageViewerPB.java                   |  7 +++--
 .../offlineImageViewer/WebImageViewer.java      |  7 +++--
 .../hdfs/util/AtomicFileOutputStream.java       |  6 ++--
 .../hadoop/hdfs/util/LightWeightHashSet.java    |  7 +++--
 .../apache/hadoop/hdfs/util/MD5FileUtils.java   |  8 ++---
 .../hadoop/hdfs/util/PersistentLongFile.java    |  8 ++---
 .../hdfs/web/resources/ExceptionHandler.java    |  7 +++--
 .../apache/hadoop/cli/TestCacheAdminCLI.java    |  7 +++--
 .../hadoop/fs/TestEnhancedByteBufferAccess.java | 10 +++---
 .../hadoop/fs/TestHdfsNativeCodeLoader.java     |  7 +++--
 .../java/org/apache/hadoop/fs/TestUnbuffer.java |  8 ++---
 .../apache/hadoop/hdfs/AdminStatesBaseTest.java |  7 +++--
 .../org/apache/hadoop/hdfs/AppendTestUtil.java  |  6 ++--
 .../apache/hadoop/hdfs/BenchmarkThroughput.java | 13 +++-----
 .../org/apache/hadoop/hdfs/DFSTestUtil.java     |  6 ++--
 .../org/apache/hadoop/hdfs/FileAppendTest4.java |  7 +++--
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |  9 +++---
 .../hdfs/MiniDFSClusterWithNodeGroup.java       |  7 +++--
 .../hdfs/ReadStripedFileWithDecodingHelper.java |  5 +--
 .../apache/hadoop/hdfs/TestAbandonBlock.java    |  7 +++--
 .../apache/hadoop/hdfs/TestAclsEndToEnd.java    |  8 ++---
 .../hadoop/hdfs/TestAppendSnapshotTruncate.java |  7 +++--
 .../hadoop/hdfs/TestBalancerBandwidth.java      |  7 +++--
 .../hadoop/hdfs/TestBlockMissingException.java  |  7 +++--
 .../hadoop/hdfs/TestClientReportBadBlock.java   |  8 ++---
 .../org/apache/hadoop/hdfs/TestConnCache.java   |  6 ++--
 .../hadoop/hdfs/TestDFSClientFailover.java      |  2 --
 .../hadoop/hdfs/TestDFSClientRetries.java       | 10 +++---
 .../org/apache/hadoop/hdfs/TestDFSFinalize.java |  6 ++--
 .../hdfs/TestDFSInotifyEventInputStream.java    |  6 ++--
 .../apache/hadoop/hdfs/TestDFSPermission.java   |  7 +++--
 .../org/apache/hadoop/hdfs/TestDFSRollback.java |  6 ++--
 .../org/apache/hadoop/hdfs/TestDFSShell.java    |  6 ++--
 .../hadoop/hdfs/TestDFSStartupVersions.java     |  6 ++--
 .../hdfs/TestDFSStorageStateRecovery.java       |  6 ++--
 .../hadoop/hdfs/TestDFSStripedInputStream.java  |  8 ++---
 ...DFSStripedInputStreamWithRandomECPolicy.java |  8 ++---
 .../hadoop/hdfs/TestDFSStripedOutputStream.java |  6 ++--
 ...tputStreamWithFailureWithRandomECPolicy.java |  8 ++---
 ...FSStripedOutputStreamWithRandomECPolicy.java |  8 ++---
 .../org/apache/hadoop/hdfs/TestDFSUpgrade.java  |  7 +++--
 .../hadoop/hdfs/TestDFSUpgradeFromImage.java    |  7 ++---
 .../org/apache/hadoop/hdfs/TestDataStream.java  |  6 ++--
 .../hadoop/hdfs/TestDataTransferProtocol.java   |  6 ++--
 .../hadoop/hdfs/TestDatanodeRegistration.java   |  7 +++--
 .../apache/hadoop/hdfs/TestDatanodeReport.java  |  6 ++--
 .../hadoop/hdfs/TestDisableConnCache.java       |  6 ++--
 .../hadoop/hdfs/TestEncryptedTransfer.java      | 17 +++++-----
 .../hadoop/hdfs/TestExternalBlockReader.java    |  8 ++---
 .../apache/hadoop/hdfs/TestHDFSServerPorts.java |  7 +++--
 .../org/apache/hadoop/hdfs/TestHDFSTrash.java   |  6 ++--
 .../hdfs/TestInjectionForSimulatedStorage.java  |  6 ++--
 .../org/apache/hadoop/hdfs/TestLargeBlock.java  |  7 +++--
 .../java/org/apache/hadoop/hdfs/TestLease.java  |  6 ++--
 .../apache/hadoop/hdfs/TestLeaseRecovery2.java  |  7 +++--
 .../hadoop/hdfs/TestMissingBlocksAlert.java     |  8 ++---
 .../hadoop/hdfs/TestParallelReadUtil.java       |  6 ++--
 .../org/apache/hadoop/hdfs/TestPipelines.java   |  6 ++--
 .../TestReadStripedFileWithMissingBlocks.java   |  8 ++---
 .../hadoop/hdfs/TestReconstructStripedFile.java |  7 +++--
 .../TestReplaceDatanodeFailureReplication.java  |  8 ++---
 .../hdfs/TestReplaceDatanodeOnFailure.java      |  7 +++--
 .../org/apache/hadoop/hdfs/TestReplication.java |  2 --
 .../apache/hadoop/hdfs/TestRollingUpgrade.java  |  7 +++--
 .../org/apache/hadoop/hdfs/TestSafeMode.java    |  6 ++--
 .../TestUnsetAndChangeDirectoryEcPolicy.java    |  8 ++---
 .../org/apache/hadoop/hdfs/TestWriteRead.java   |  8 ++---
 .../hadoop/hdfs/TestWriteReadStripedFile.java   | 19 ++++++-----
 .../hdfs/TestWriteStripedFileWithFailure.java   |  8 ++---
 .../hadoop/hdfs/net/TestDFSNetworkTopology.java |  8 ++---
 .../hadoop/hdfs/protocol/TestLocatedBlock.java  |  9 +++---
 .../datatransfer/sasl/TestSaslDataTransfer.java |  4 +--
 .../hdfs/qjournal/MiniJournalCluster.java       |  7 +++--
 .../hadoop/hdfs/qjournal/MiniQJMHACluster.java  |  7 +++--
 .../hdfs/qjournal/TestSecureNNWithQJM.java      |  2 --
 .../qjournal/client/TestEpochsAreUnique.java    |  7 +++--
 .../hdfs/qjournal/client/TestQJMWithFaults.java |  6 ++--
 .../client/TestQuorumJournalManager.java        | 16 +++++-----
 .../client/TestQuorumJournalManagerUnit.java    |  4 +--
 .../TestJournalNodeRespectsBindHostKeys.java    |  6 ++--
 .../hdfs/security/TestDelegationToken.java      | 11 ++++---
 .../TestDelegationTokenForProxyUser.java        |  7 +++--
 .../security/token/block/TestBlockToken.java    |  7 +++--
 .../hdfs/server/balancer/TestBalancer.java      | 12 +++----
 .../TestBalancerWithMultipleNameNodes.java      | 12 +++----
 .../balancer/TestBalancerWithNodeGroup.java     |  6 ++--
 .../BaseReplicationPolicyTest.java              |  3 +-
 .../server/blockmanagement/TestBlockInfo.java   |  8 ++---
 .../blockmanagement/TestBlockManager.java       | 12 +++----
 .../TestBlockReportRateLimiting.java            |  7 +++--
 .../TestBlocksWithNotEnoughRacks.java           |  7 +++--
 .../blockmanagement/TestCachedBlocksList.java   |  7 +++--
 .../blockmanagement/TestCorruptReplicaInfo.java |  6 ++--
 .../blockmanagement/TestDatanodeManager.java    |  7 +++--
 .../TestNameNodePrunesMissingStorages.java      |  7 +++--
 .../TestRBWBlockInvalidation.java               |  9 +++---
 .../TestSequentialBlockGroupId.java             |  8 ++---
 .../blockmanagement/TestSequentialBlockId.java  |  7 +++--
 .../server/common/TestGetUriFromString.java     |  7 +++--
 .../hdfs/server/datanode/DataNodeTestUtils.java |  8 ++---
 .../server/datanode/TestBPOfferService.java     |  6 ++--
 .../hdfs/server/datanode/TestBatchIbr.java      | 11 ++++---
 .../TestBlockHasMultipleReplicasOnSameDN.java   |  7 +++--
 .../server/datanode/TestBlockPoolManager.java   |  7 +++--
 .../hdfs/server/datanode/TestBlockRecovery.java | 13 ++++----
 .../server/datanode/TestBlockReplacement.java   |  6 ++--
 .../datanode/TestBpServiceActorScheduler.java   |  7 +++--
 .../TestDataNodeErasureCodingMetrics.java       |  8 ++---
 .../datanode/TestDataNodeFaultInjector.java     |  8 ++---
 .../datanode/TestDataNodeHotSwapVolumes.java    |  8 ++---
 .../datanode/TestDataNodeInitStorage.java       |  7 +++--
 .../server/datanode/TestDataNodeMXBean.java     |  7 +++--
 .../server/datanode/TestDataNodeMetrics.java    |  9 +++---
 .../datanode/TestDataNodeMetricsLogger.java     |  6 ++--
 .../TestDataNodeMultipleRegistrations.java      |  8 ++---
 .../datanode/TestDataNodeReconfiguration.java   |  7 +++--
 .../datanode/TestDataNodeRollingUpgrade.java    |  7 +++--
 .../server/datanode/TestDataNodeTcpNoDelay.java |  8 ++---
 .../TestDataNodeVolumeFailureReporting.java     | 14 ++++-----
 .../datanode/TestDataNodeVolumeMetrics.java     |  8 ++---
 .../TestDatanodeProtocolRetryPolicy.java        | 10 +++---
 .../server/datanode/TestDatanodeRegister.java   |  7 +++--
 .../server/datanode/TestDirectoryScanner.java   |  9 +++---
 ...TestDnRespectsBlockReportSplitThreshold.java |  7 +++--
 .../server/datanode/TestFsDatasetCache.java     | 14 ++++-----
 .../datanode/TestIncrementalBlockReports.java   |  7 +++--
 .../datanode/TestIncrementalBrVariations.java   | 21 +++++++------
 .../datanode/TestReadOnlySharedStorage.java     |  7 +++--
 .../hdfs/server/datanode/TestStorageReport.java |  7 +++--
 .../hdfs/server/datanode/TestTransferRbw.java   |  7 +++--
 .../fsdataset/impl/FsDatasetImplTestUtils.java  |  8 ++---
 .../fsdataset/impl/LazyPersistTestCase.java     |  7 +++--
 .../fsdataset/impl/TestSpaceReservation.java    |  6 ++--
 .../TestDiskBalancerWithMockMover.java          |  7 +++--
 .../hdfs/server/mover/TestStorageMover.java     | 22 ++++++-------
 .../hdfs/server/namenode/FSImageTestUtil.java   | 28 +++++------------
 .../server/namenode/NNThroughputBenchmark.java  |  9 +++---
 .../namenode/OfflineEditsViewerHelper.java      |  8 ++---
 .../hdfs/server/namenode/TestAddBlockRetry.java |  7 +++--
 .../hdfs/server/namenode/TestAllowFormat.java   |  8 ++---
 .../server/namenode/TestAuditLogAtDebug.java    |  6 ++--
 .../hdfs/server/namenode/TestBackupNode.java    | 13 ++++----
 .../server/namenode/TestCacheDirectives.java    |  6 ++--
 .../hdfs/server/namenode/TestCheckpoint.java    |  8 ++---
 .../hdfs/server/namenode/TestClusterId.java     |  9 +++---
 .../hdfs/server/namenode/TestDeadDatanode.java  |  7 +++--
 .../hdfs/server/namenode/TestEditLog.java       | 10 +++---
 .../namenode/TestEditLogFileInputStream.java    |  8 ++---
 .../hdfs/server/namenode/TestEditLogRace.java   | 11 ++++---
 .../hdfs/server/namenode/TestFSDirAttrOp.java   |  7 +++--
 .../hdfs/server/namenode/TestFSDirectory.java   |  7 +++--
 .../server/namenode/TestFSEditLogLoader.java    |  6 ++--
 .../namenode/TestFSImageWithSnapshot.java       |  4 +--
 .../namenode/TestFavoredNodesEndToEnd.java      |  8 ++---
 .../server/namenode/TestFileJournalManager.java | 23 ++++++++------
 .../hdfs/server/namenode/TestFileTruncate.java  | 12 +++----
 .../hadoop/hdfs/server/namenode/TestFsck.java   | 13 ++++----
 .../namenode/TestFsckWithMultipleNameNodes.java |  7 +++--
 .../hdfs/server/namenode/TestHDFSConcat.java    |  7 +++--
 .../hdfs/server/namenode/TestHostsFiles.java    |  8 ++---
 .../hdfs/server/namenode/TestINodeFile.java     |  8 ++---
 .../namenode/TestLargeDirectoryDelete.java      |  9 +++---
 .../hdfs/server/namenode/TestListOpenFiles.java |  9 +++---
 .../TestNNStorageRetentionFunctional.java       |  6 ++--
 .../server/namenode/TestNameEditsConfigs.java   |  6 ++--
 .../namenode/TestNameNodeMetricsLogger.java     |  6 ++--
 .../namenode/TestNameNodeReconfigure.java       |  8 ++---
 .../server/namenode/TestNameNodeRecovery.java   |  9 +++---
 .../TestNameNodeRespectsBindHostKeys.java       |  7 +++--
 .../namenode/TestNameNodeStatusMXBean.java      |  6 ++--
 .../namenode/TestNamenodeCapacityReport.java    |  7 +++--
 .../server/namenode/TestQuotaByStorageType.java | 33 ++++++++++----------
 .../hdfs/server/namenode/TestSaveNamespace.java |  4 +--
 .../hdfs/server/namenode/TestStartup.java       |  7 ++---
 .../server/namenode/TestStorageRestore.java     |  8 ++---
 .../server/namenode/TestStripedINodeFile.java   |  6 ++--
 .../hdfs/server/namenode/ha/HATestUtil.java     |  6 ++--
 .../namenode/ha/TestBootstrapStandby.java       | 13 ++++----
 .../namenode/ha/TestDFSUpgradeWithHA.java       |  7 +++--
 .../namenode/ha/TestDelegationTokensWithHA.java |  8 ++---
 .../server/namenode/ha/TestEditLogTailer.java   |  4 +--
 .../namenode/ha/TestEditLogsDuringFailover.java | 10 +++---
 .../namenode/ha/TestFailureToReadEdits.java     |  8 ++---
 .../hdfs/server/namenode/ha/TestHAFsck.java     |  4 +--
 .../hdfs/server/namenode/ha/TestHAMetrics.java  | 11 ++++---
 .../hdfs/server/namenode/ha/TestHASafeMode.java | 13 ++++----
 .../namenode/ha/TestHAStateTransitions.java     | 12 +++----
 .../namenode/ha/TestInitializeSharedEdits.java  |  7 +++--
 .../namenode/ha/TestRetryCacheWithHA.java       |  7 +++--
 .../namenode/ha/TestSeveralNameNodes.java       |  7 +++--
 .../namenode/ha/TestStandbyBlockManagement.java |  6 ++--
 .../namenode/ha/TestStandbyInProgressTail.java  |  8 ++---
 .../server/namenode/ha/TestStandbyIsHot.java    |  6 ++--
 .../namenode/metrics/TestNameNodeMetrics.java   | 11 ++++---
 ...tINodeFileUnderConstructionWithSnapshot.java |  6 ++--
 .../snapshot/TestOpenFilesWithSnapshot.java     |  8 ++---
 .../snapshot/TestRenameWithSnapshots.java       |  7 +++--
 .../server/namenode/snapshot/TestSnapshot.java  |  4 +--
 .../resources/TestWebHdfsCreatePermissions.java |  7 +++--
 .../web/resources/TestWebHdfsDataLocality.java  |  7 +++--
 .../sps/TestExternalStoragePolicySatisfier.java |  3 +-
 .../shortcircuit/TestShortCircuitCache.java     |  9 +++---
 .../apache/hadoop/hdfs/tools/TestDFSAdmin.java  |  6 ++--
 .../hadoop/hdfs/tools/TestDFSHAAdmin.java       |  7 +++--
 .../hdfs/tools/TestDFSHAAdminMiniCluster.java   | 13 ++++----
 .../TestOfflineEditsViewer.java                 |  8 ++---
 .../TestOfflineImageViewer.java                 |  9 +++---
 .../TestOfflineImageViewerForAcl.java           |  8 ++---
 ...TestOfflineImageViewerForContentSummary.java |  8 ++---
 .../TestOfflineImageViewerForXAttr.java         |  8 ++---
 .../hdfs/util/TestLightWeightHashSet.java       |  8 ++---
 .../hdfs/util/TestLightWeightLinkedSet.java     | 10 +++---
 .../hdfs/web/TestFSMainOperationsWebHdfs.java   |  4 +--
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 10 +++---
 .../hadoop/hdfs/web/TestWebHdfsTimeouts.java    | 18 ++++++-----
 .../web/TestWebHdfsWithMultipleNameNodes.java   | 12 +++----
 .../apache/hadoop/hdfs/web/WebHdfsTestUtil.java |  7 +++--
 .../hadoop/hdfs/web/resources/TestParam.java    |  6 ++--
 .../apache/hadoop/security/TestPermission.java  |  7 +++--
 .../hadoop/security/TestPermissionSymlinks.java |  7 +++--
 317 files changed, 1282 insertions(+), 1172 deletions(-)
----------------------------------------------------------------------
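
For reference, the bulk of the files listed above receive the same mechanical change, sketched below with a hypothetical class name (ExampleComponent is illustrative, not part of the patch): the commons-logging Log/LogFactory pair is replaced by slf4j's Logger/LoggerFactory, and because slf4j defines no FATAL level, the few former LOG.fatal(...) calls become LOG.error(...) (see the JournalNode hunk below).

    // Before (commons-logging), as removed throughout this patch:
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(ExampleComponent.class);

    // After (slf4j), as added throughout this patch:
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleComponent {
      private static final Logger LOG =
          LoggerFactory.getLogger(ExampleComponent.class);

      void reportError(java.io.File f) {
        // slf4j has no FATAL level, so former LOG.fatal(...) calls are
        // downgraded to LOG.error(...) with the same message and cause.
        LOG.error("Error reported on file " + f + "... exiting", new Exception());
      }
    }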


http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index fb75322..61568d5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -1872,7 +1872,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
       return PBHelperClient.convert(
           reply.getReadOpChecksumInfo().getChecksum().getType());
     } finally {
-      IOUtilsClient.cleanup(null, pair.in, pair.out);
+      IOUtilsClient.cleanupWithLogger(LOG, pair.in, pair.out);
     }
   }
 
@@ -2933,7 +2933,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
       return peer;
     } finally {
       if (!success) {
-        IOUtilsClient.cleanup(LOG, peer);
+        IOUtilsClient.cleanupWithLogger(LOG, peer);
         IOUtils.closeSocket(sock);
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
index 3fac7c8..8f785c7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
@@ -649,7 +649,7 @@ public class DFSUtilClient {
       return peer;
     } finally {
       if (!success) {
-        IOUtilsClient.cleanup(null, peer);
+        IOUtilsClient.cleanupWithLogger(LOG, peer);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PeerCache.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PeerCache.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PeerCache.java
index 1ddb42e..0580ed5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PeerCache.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PeerCache.java
@@ -188,7 +188,7 @@ public class PeerCache {
     if (peer.isClosed()) return;
     if (capacity <= 0) {
       // Cache disabled.
-      IOUtilsClient.cleanup(LOG, peer);
+      IOUtilsClient.cleanupWithLogger(LOG, peer);
       return;
     }
     putInternal(dnId, peer);
@@ -221,7 +221,7 @@ public class PeerCache {
           Time.monotonicNow() - entry.getValue().getTime() < expiryPeriod) {
         break;
       }
-      IOUtilsClient.cleanup(LOG, entry.getValue().getPeer());
+      IOUtilsClient.cleanupWithLogger(LOG, entry.getValue().getPeer());
       iter.remove();
     }
   }
@@ -239,7 +239,7 @@ public class PeerCache {
         "capacity: " + capacity);
     }
     Entry<Key, Value> entry = iter.next();
-    IOUtilsClient.cleanup(LOG, entry.getValue().getPeer());
+    IOUtilsClient.cleanupWithLogger(LOG, entry.getValue().getPeer());
     iter.remove();
   }
 
@@ -267,7 +267,7 @@ public class PeerCache {
   @VisibleForTesting
   synchronized void clear() {
     for (Value value : multimap.values()) {
-      IOUtilsClient.cleanup(LOG, value.getPeer());
+      IOUtilsClient.cleanupWithLogger(LOG, value.getPeer());
     }
     multimap.clear();
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
index ce43185..8e592f4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
@@ -549,14 +549,14 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
           // Handle an I/O error we got when using a cached socket.
           // These are considered less serious, because the socket may be stale.
           LOG.debug("{}: closing stale domain peer {}", this, peer, e);
-          IOUtilsClient.cleanup(LOG, peer);
+          IOUtilsClient.cleanupWithLogger(LOG, peer);
         } else {
           // Handle an I/O error we got when using a newly created socket.
           // We temporarily disable the domain socket path for a few minutes in
           // this case, to prevent wasting more time on it.
           LOG.warn(this + ": I/O error requesting file descriptors.  " +
               "Disabling domain socket " + peer.getDomainSocket(), e);
-          IOUtilsClient.cleanup(LOG, peer);
+          IOUtilsClient.cleanupWithLogger(LOG, peer);
           clientContext.getDomainSocketFactory()
               .disableDomainSocketPath(pathInfo.getPath());
           return null;
@@ -620,7 +620,7 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
         return null;
       } finally {
         if (replica == null) {
-          IOUtilsClient.cleanup(DFSClient.LOG, fis[0], fis[1]);
+          IOUtilsClient.cleanupWithLogger(DFSClient.LOG, fis[0], fis[1]);
         }
       }
     case ERROR_UNSUPPORTED:
@@ -692,7 +692,7 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
         blockReader = getRemoteBlockReader(peer);
         return blockReader;
       } catch (IOException ioe) {
-        IOUtilsClient.cleanup(LOG, peer);
+        IOUtilsClient.cleanupWithLogger(LOG, peer);
         if (isSecurityException(ioe)) {
           LOG.trace("{}: got security exception while constructing a remote "
                   + " block reader from the unix domain socket at {}",
@@ -715,7 +715,7 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
         }
       } finally {
         if (blockReader == null) {
-          IOUtilsClient.cleanup(LOG, peer);
+          IOUtilsClient.cleanupWithLogger(LOG, peer);
         }
       }
     }
@@ -766,7 +766,7 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
         }
       } finally {
         if (blockReader == null) {
-          IOUtilsClient.cleanup(LOG, peer);
+          IOUtilsClient.cleanupWithLogger(LOG, peer);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderLocalLegacy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderLocalLegacy.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderLocalLegacy.java
index e1e38c6..e48ace6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderLocalLegacy.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderLocalLegacy.java
@@ -686,7 +686,7 @@ class BlockReaderLocalLegacy implements BlockReader {
 
   @Override
   public synchronized void close() throws IOException {
-    IOUtilsClient.cleanup(LOG, dataIn, checksumIn);
+    IOUtilsClient.cleanupWithLogger(LOG, dataIn, checksumIn);
     if (slowReadBuff != null) {
       bufferPool.returnBuffer(slowReadBuff);
       slowReadBuff = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
index 9c2d2e0..aa982d0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
@@ -880,7 +880,7 @@ public class ShortCircuitCache implements Closeable {
       maxNonMmappedEvictableLifespanMs = 0;
       maxEvictableMmapedSize = 0;
       // Close and join cacheCleaner thread.
-      IOUtilsClient.cleanup(LOG, cacheCleaner);
+      IOUtilsClient.cleanupWithLogger(LOG, cacheCleaner);
       // Purge all replicas.
       while (true) {
         Object eldestKey;
@@ -931,7 +931,7 @@ public class ShortCircuitCache implements Closeable {
       LOG.error("Interrupted while waiting for CleanerThreadPool "
           + "to terminate", e);
     }
-    IOUtilsClient.cleanup(LOG, shmManager);
+    IOUtilsClient.cleanupWithLogger(LOG, shmManager);
   }
 
   @VisibleForTesting // ONLY for testing

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitReplica.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitReplica.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitReplica.java
index fd5dbfc..14116e2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitReplica.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitReplica.java
@@ -244,7 +244,7 @@ public class ShortCircuitReplica {
         suffix += "  munmapped.";
       }
     }
-    IOUtilsClient.cleanup(LOG, dataStream, metaStream);
+    IOUtilsClient.cleanupWithLogger(LOG, dataStream, metaStream);
     if (slot != null) {
       cache.scheduleSlotReleaser(slot);
       if (LOG.isTraceEnabled()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/IOUtilsClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/IOUtilsClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/IOUtilsClient.java
index 71596f3..85e9cee 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/IOUtilsClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/IOUtilsClient.java
@@ -31,7 +31,8 @@ public class IOUtilsClient {
    * @param log the log to record problems to at debug level. Can be null.
    * @param closeables the objects to close
    */
-  public static void cleanup(Logger log, java.io.Closeable... closeables) {
+  public static void cleanupWithLogger(Logger log,
+                                       java.io.Closeable... closeables) {
     for (java.io.Closeable c : closeables) {
       if (c != null) {
         try {
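
The hunk above only renames the helper; its behavior is unchanged, and the javadoc contract ("the log to record problems to at debug level. Can be null.") still holds. A minimal caller-side sketch, assuming hadoop-hdfs-client is on the classpath; the class and stream names are illustrative, not taken from the patch — passing the class logger instead of null lets failures during close() be logged at debug level instead of being dropped:

    import java.io.Closeable;
    import org.apache.hadoop.hdfs.util.IOUtilsClient;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class CleanupExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(CleanupExample.class);

      static void useAndClose(Closeable in, Closeable out) {
        try {
          // ... read from in, write to out ...
        } finally {
          // Closes each non-null argument; problems during close()
          // are recorded on LOG at debug level rather than rethrown.
          IOUtilsClient.cleanupWithLogger(LOG, in, out);
        }
      }
    }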

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
index f25797e..6067a5d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
@@ -1211,11 +1211,11 @@ class OpenFileCtx {
 
       LOG.info("Clean up open file context for fileId: {}",
           latestAttr.getFileId());
-      cleanupWithLogger();
+      cleanup();
     }
   }
 
-  synchronized void cleanupWithLogger() {
+  synchronized void cleanup() {
     if (!activeState) {
       LOG.info("Current OpenFileCtx is already inactive, no need to cleanup.");
       return;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtxCache.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtxCache.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtxCache.java
index cb9e2c8..5c915d2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtxCache.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtxCache.java
@@ -139,7 +139,7 @@ class OpenFileCtxCache {
     
     // Cleanup the old stream outside the lock
     if (toEvict != null) {
-      toEvict.cleanupWithLogger();
+      toEvict.cleanup();
     }
     return true;
   }
@@ -179,7 +179,7 @@ class OpenFileCtxCache {
 
     // Invoke the cleanup outside the lock
     for (OpenFileCtx ofc : ctxToRemove) {
-      ofc.cleanupWithLogger();
+      ofc.cleanup();
     }
   }
 
@@ -215,7 +215,7 @@ class OpenFileCtxCache {
 
     // Invoke the cleanup outside the lock
     for (OpenFileCtx ofc : cleanedContext) {
-      ofc.cleanupWithLogger();
+      ofc.cleanup();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 23f0478..6dd366f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -64,8 +64,8 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -99,7 +99,8 @@ import com.google.protobuf.BlockingService;
 
 @InterfaceAudience.Private
 public class DFSUtil {
-  public static final Log LOG = LogFactory.getLog(DFSUtil.class.getName());
+  public static final Logger LOG =
+      LoggerFactory.getLogger(DFSUtil.class.getName());
   
   private DFSUtil() { /* Hidden constructor */ }
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsDtFetcher.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsDtFetcher.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsDtFetcher.java
index 02aa4b9..4fcc319 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsDtFetcher.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsDtFetcher.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hdfs;
 import java.io.IOException;
 import java.net.URI;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +40,8 @@ import org.apache.hadoop.security.token.Token;
  *  compilation units.  Resolution of fetcher impl will be done at runtime.
  */
 public class HdfsDtFetcher implements DtFetcher {
-  private static final Log LOG = LogFactory.getLog(HdfsDtFetcher.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HdfsDtFetcher.class);
 
   private static final String SERVICE_NAME = HdfsConstants.HDFS_URI_SCHEME;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
index b63d26b..bb555ef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
@@ -25,8 +25,8 @@ import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.NameNodeProxiesClient.ProxyAndInfo;
@@ -73,7 +73,8 @@ import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolPB;
 @InterfaceAudience.Private
 public class NameNodeProxies {
   
-  private static final Log LOG = LogFactory.getLog(NameNodeProxies.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NameNodeProxies.class);
 
   /**
    * Creates the namenode proxy with the passed protocol. This will handle

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SWebHdfsDtFetcher.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SWebHdfsDtFetcher.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SWebHdfsDtFetcher.java
index 46f9b00..18dd720 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SWebHdfsDtFetcher.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SWebHdfsDtFetcher.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hdfs;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
 import org.apache.hadoop.io.Text;
@@ -28,7 +28,8 @@ import org.apache.hadoop.io.Text;
  *  DtFetcher for SWebHdfsFileSystem using the base class HdfsDtFetcher impl.
  */
 public class SWebHdfsDtFetcher extends HdfsDtFetcher {
-  private static final Log LOG = LogFactory.getLog(SWebHdfsDtFetcher.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SWebHdfsDtFetcher.class);
 
   private static final String SERVICE_NAME = WebHdfsConstants.SWEBHDFS_SCHEME;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/WebHdfsDtFetcher.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/WebHdfsDtFetcher.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/WebHdfsDtFetcher.java
index c2bb852..e8ef5d7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/WebHdfsDtFetcher.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/WebHdfsDtFetcher.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hdfs;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
 import org.apache.hadoop.io.Text;
@@ -28,7 +28,8 @@ import org.apache.hadoop.io.Text;
  *  DtFetcher for WebHdfsFileSystem using the base class HdfsDtFetcher impl.
  */
 public class WebHdfsDtFetcher extends HdfsDtFetcher {
-  private static final Log LOG = LogFactory.getLog(WebHdfsDtFetcher.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(WebHdfsDtFetcher.class);
 
   private static final String SERVICE_NAME = WebHdfsConstants.WEBHDFS_SCHEME;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/DomainPeerServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/DomainPeerServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/DomainPeerServer.java
index 5425bd5..5d881d0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/DomainPeerServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/DomainPeerServer.java
@@ -21,14 +21,14 @@ package org.apache.hadoop.hdfs.net;
 import java.io.IOException;
 import java.net.SocketTimeoutException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.net.unix.DomainSocket;
 
 @InterfaceAudience.Private
 public class DomainPeerServer implements PeerServer {
-  static final Log LOG = LogFactory.getLog(DomainPeerServer.class);
+  static final Logger LOG = LoggerFactory.getLogger(DomainPeerServer.class);
   private final DomainSocket sock;
 
   DomainPeerServer(DomainSocket sock) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/TcpPeerServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/TcpPeerServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/TcpPeerServer.java
index 40d2b33..9fc6692 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/TcpPeerServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/TcpPeerServer.java
@@ -23,8 +23,8 @@ import java.net.ServerSocket;
 import java.net.SocketTimeoutException;
 import java.nio.channels.ServerSocketChannel;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
@@ -32,7 +32,7 @@ import org.apache.hadoop.ipc.Server;
 
 @InterfaceAudience.Private
 public class TcpPeerServer implements PeerServer {
-  static final Log LOG = LogFactory.getLog(TcpPeerServer.class);
+  static final Logger LOG = LoggerFactory.getLogger(TcpPeerServer.class);
 
   private final ServerSocket serverSocket;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/AsyncLoggerSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/AsyncLoggerSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/AsyncLoggerSet.java
index b52e312..6302b2a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/AsyncLoggerSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/AsyncLoggerSet.java
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetJournalStateResponseProto;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.NewEpochResponseProto;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
@@ -46,7 +46,7 @@ import com.google.common.util.concurrent.ListenableFuture;
  * {@link QuorumCall} instances.
  */
 class AsyncLoggerSet {
-  static final Log LOG = LogFactory.getLog(AsyncLoggerSet.class);
+  static final Logger LOG = LoggerFactory.getLogger(AsyncLoggerSet.class);
 
   private final List<AsyncLogger> loggers;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
index bd45292..ba2b20a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
@@ -30,8 +30,8 @@ import java.util.PriorityQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -65,7 +65,7 @@ import com.google.protobuf.TextFormat;
  */
 @InterfaceAudience.Private
 public class QuorumJournalManager implements JournalManager {
-  static final Log LOG = LogFactory.getLog(QuorumJournalManager.class);
+  static final Logger LOG = LoggerFactory.getLogger(QuorumJournalManager.class);
 
   // Timeouts for which the QJM will wait for each of the following actions.
   private final int startSegmentTimeoutMs;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/GetJournalEditServlet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/GetJournalEditServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/GetJournalEditServlet.java
index e967527..81b3f8c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/GetJournalEditServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/GetJournalEditServlet.java
@@ -32,8 +32,8 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.text.StringEscapeUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -67,7 +67,8 @@ import org.apache.hadoop.util.StringUtils;
 public class GetJournalEditServlet extends HttpServlet {
 
   private static final long serialVersionUID = -4635891628211723009L;
-  private static final Log LOG = LogFactory.getLog(GetJournalEditServlet.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(GetJournalEditServlet.class);
 
   static final String STORAGEINFO_PARAM = "storageInfo";
   static final String JOURNAL_ID_PARAM = "jid";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
index 7e88afa..39afabc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
@@ -32,8 +32,8 @@ import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.Range;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.qjournal.protocol.JournalNotFormattedException;
@@ -79,7 +79,7 @@ import com.google.protobuf.TextFormat;
  * the same JVM.
  */
 public class Journal implements Closeable {
-  static final Log LOG = LogFactory.getLog(Journal.class);
+  static final Logger LOG = LoggerFactory.getLogger(Journal.class);
 
 
   // Current writing state
@@ -1045,7 +1045,7 @@ public class Journal implements Closeable {
   public synchronized void doPreUpgrade() throws IOException {
     // Do not hold file lock on committedTxnId, because the containing
     // directory will be renamed.  It will be reopened lazily on next access.
-    IOUtils.cleanup(LOG, committedTxnId);
+    IOUtils.cleanupWithLogger(LOG, committedTxnId);
     storage.getJournalManager().doPreUpgrade();
   }
 
@@ -1087,7 +1087,7 @@ public class Journal implements Closeable {
       lastWriterEpoch.set(prevLastWriterEpoch.get());
       committedTxnId.set(prevCommittedTxnId.get());
     } finally {
-      IOUtils.cleanup(LOG, prevCommittedTxnId);
+      IOUtils.cleanupWithLogger(LOG, prevCommittedTxnId);
     }
   }
 
@@ -1109,7 +1109,7 @@ public class Journal implements Closeable {
   public synchronized void doRollback() throws IOException {
     // Do not hold file lock on committedTxnId, because the containing
     // directory will be renamed.  It will be reopened lazily on next access.
-    IOUtils.cleanup(LOG, committedTxnId);
+    IOUtils.cleanupWithLogger(LOG, committedTxnId);
     storage.getJournalManager().doRollback();
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
index 11a5c04..3df69f1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
@@ -21,8 +21,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
@@ -68,7 +68,7 @@ import java.util.Map;
  */
 @InterfaceAudience.Private
 public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
-  public static final Log LOG = LogFactory.getLog(JournalNode.class);
+  public static final Logger LOG = LoggerFactory.getLogger(JournalNode.class);
   private Configuration conf;
   private JournalNodeRpcServer rpcServer;
   private JournalNodeHttpServer httpServer;
@@ -285,7 +285,7 @@ public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
     }
     
     for (Journal j : journalsById.values()) {
-      IOUtils.cleanup(LOG, j);
+      IOUtils.cleanupWithLogger(LOG, j);
     }
 
     DefaultMetricsSystem.shutdown();
@@ -403,7 +403,7 @@ public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
   private class ErrorReporter implements StorageErrorReporter {
     @Override
     public void reportErrorOnFile(File f) {
-      LOG.fatal("Error reported on file " + f + "... exiting",
+      LOG.error("Error reported on file " + f + "... exiting",
           new Exception());
       stop(1);
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java
index 0f11026..bfa9a22 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.qjournal.server;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.BlockingService;
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -61,7 +61,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_JOURNALNODE_RPC_BIND_HOST
 @VisibleForTesting
 public class JournalNodeRpcServer implements QJournalProtocol,
     InterQJournalProtocol {
-  private static final Log LOG = JournalNode.LOG;
+  private static final Logger LOG = JournalNode.LOG;
   private static final int HANDLER_COUNT = 5;
   private final JournalNode jn;
   private Server server;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
index 3d0cf34..21fbbe4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
@@ -29,8 +29,8 @@ import java.util.Iterator;
 import java.util.Map;
 
 import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
@@ -58,7 +58,8 @@ import com.google.common.collect.Multiset;
 @InterfaceAudience.Private
 public class BlockTokenSecretManager extends
     SecretManager<BlockTokenIdentifier> {
-  public static final Log LOG = LogFactory.getLog(BlockTokenSecretManager.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(BlockTokenSecretManager.class);
 
   public static final Token<BlockTokenIdentifier> DUMMY_TOKEN = new Token<BlockTokenIdentifier>();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java
index 3547c96..ca0e643 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java
@@ -27,8 +27,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection;
@@ -62,8 +62,8 @@ import com.google.protobuf.ByteString;
 public class DelegationTokenSecretManager
     extends AbstractDelegationTokenSecretManager<DelegationTokenIdentifier> {
 
-  private static final Log LOG = LogFactory
-      .getLog(DelegationTokenSecretManager.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(DelegationTokenSecretManager.class);
   
   private final FSNamesystem namesystem;
   private final SerializerCompat serializerCompat = new SerializerCompat();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
index 426c7ab..a58e391 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
@@ -36,8 +36,8 @@ import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -172,7 +172,7 @@ import com.google.common.base.Preconditions;
 
 @InterfaceAudience.Private
 public class Balancer {
-  static final Log LOG = LogFactory.getLog(Balancer.class);
+  static final Logger LOG = LoggerFactory.getLogger(Balancer.class);
 
   static final Path BALANCER_ID_PATH = new Path("/system/balancer.id");
 
@@ -724,7 +724,7 @@ public class Balancer {
       }
     } finally {
       for(NameNodeConnector nnc : connectors) {
-        IOUtils.cleanup(LOG, nnc);
+        IOUtils.cleanupWithLogger(LOG, nnc);
       }
     }
     return ExitStatus.SUCCESS.getExitCode();
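
The IOUtils.cleanup to IOUtils.cleanupWithLogger changes in this file (and in JournalNode and BlockPoolSlice) are the same substitution: Hadoop's IOUtils.cleanup(Log, Closeable...) takes a commons-logging Log, while cleanupWithLogger(Logger, Closeable...) takes an slf4j Logger; both close each Closeable, swallowing and logging any exception from close() rather than propagating it. A minimal sketch of the call pattern, with an illustrative method and list rather than code from this diff:

    import java.io.Closeable;
    import java.util.List;
    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class CleanupSketch {
      private static final Logger LOG = LoggerFactory.getLogger(CleanupSketch.class);

      // Close every resource, logging (not propagating) any failure from close().
      static void closeAll(List<? extends Closeable> resources) {
        for (Closeable c : resources) {
          IOUtils.cleanupWithLogger(LOG, c);
        }
      }
    }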

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java
index 060c013..8a71417 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java
@@ -44,8 +44,8 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.ThreadPoolExecutor;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
@@ -88,7 +88,7 @@ import com.google.common.base.Preconditions;
 /** Dispatching block replica moves between datanodes. */
 @InterfaceAudience.Private
 public class Dispatcher {
-  static final Log LOG = LogFactory.getLog(Dispatcher.class);
+  static final Logger LOG = LoggerFactory.getLogger(Dispatcher.class);
 
   /**
    * the period of time to delay the usage of a DataNode after hitting

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/NameNodeConnector.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/NameNodeConnector.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/NameNodeConnector.java
index 2b3c193..114167c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/NameNodeConnector.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/NameNodeConnector.java
@@ -31,8 +31,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -62,7 +62,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 public class NameNodeConnector implements Closeable {
-  private static final Log LOG = LogFactory.getLog(NameNodeConnector.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NameNodeConnector.class);
 
   public static final int DEFAULT_MAX_IDLE_ITERATIONS = 5;
   private static boolean write2IdFile = true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/AvailableSpaceBlockPlacementPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/AvailableSpaceBlockPlacementPolicy.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/AvailableSpaceBlockPlacementPolicy.java
index 8435b46..8f76e8b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/AvailableSpaceBlockPlacementPolicy.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/AvailableSpaceBlockPlacementPolicy.java
@@ -25,8 +25,8 @@ import java.util.Collection;
 import java.util.Random;
 
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -39,8 +39,8 @@ import org.apache.hadoop.net.Node;
  */
 public class AvailableSpaceBlockPlacementPolicy extends
     BlockPlacementPolicyDefault {
-  private static final Log LOG = LogFactory
-      .getLog(AvailableSpaceBlockPlacementPolicy.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(AvailableSpaceBlockPlacementPolicy.class);
   private static final Random RAND = new Random();
   private int balancedPreference =
       (int) (100 * DFS_NAMENODE_AVAILABLE_SPACE_BLOCK_PLACEMENT_POLICY_BALANCED_SPACE_PREFERENCE_FRACTION_DEFAULT);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
index 1131506..430c0d4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
@@ -24,8 +24,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.net.InetAddresses;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -73,7 +73,7 @@ import java.util.concurrent.TimeUnit;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class DatanodeManager {
-  static final Log LOG = LogFactory.getLog(DatanodeManager.class);
+  static final Logger LOG = LoggerFactory.getLogger(DatanodeManager.class);
 
   private final Namesystem namesystem;
   private final BlockManager blockManager;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostFileManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostFileManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostFileManager.java
index 59f907f..b7bf674 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostFileManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostFileManager.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -50,7 +50,8 @@ import java.util.HashSet;
  * resolutions are only done during the loading time to minimize the latency.
  */
 public class HostFileManager extends HostConfigManager {
-  private static final Log LOG = LogFactory.getLog(HostFileManager.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HostFileManager.class);
   private Configuration conf;
   private HostSet includes = new HostSet();
   private HostSet excludes = new HostSet();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
index 637c679..498a093 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hdfs.server.common;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -53,7 +53,7 @@ public class JspHelper {
   public static final String CURRENT_CONF = "current.conf";
   public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME;
   public static final String NAMENODE_ADDRESS = "nnaddr";
-  private static final Log LOG = LogFactory.getLog(JspHelper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(JspHelper.class);
 
   /** Private constructor for preventing creating JspHelper object. */
   private JspHelper() {}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/MetricsLoggerTask.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/MetricsLoggerTask.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/MetricsLoggerTask.java
index 40c048c..051e2d2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/MetricsLoggerTask.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/MetricsLoggerTask.java
@@ -32,8 +32,9 @@ import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 
 import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.log4j.Appender;
 import org.apache.log4j.AsyncAppender;
@@ -43,7 +44,8 @@ import org.apache.log4j.AsyncAppender;
  */
 public class MetricsLoggerTask implements Runnable {
 
-  public static final Log LOG = LogFactory.getLog(MetricsLoggerTask.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(MetricsLoggerTask.class);
 
   private static ObjectName objectName = null;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Util.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Util.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Util.java
index 5dee16a..4e30e50 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Util.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Util.java
@@ -38,8 +38,8 @@ import java.util.Set;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -57,7 +57,8 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 
 @InterfaceAudience.Private
 public final class Util {
-  private final static Log LOG = LogFactory.getLog(Util.class.getName());
+  private final static Logger LOG =
+      LoggerFactory.getLogger(Util.class.getName());
 
   public final static String FILE_LENGTH = "File-Length";
   public final static String CONTENT_LENGTH = "Content-Length";
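
One small variation in the Util hunk above: the original declaration used LogFactory.getLog(Util.class.getName()), so the conversion goes through slf4j's String overload, LoggerFactory.getLogger(String), rather than the Class overload used elsewhere in this patch. The two are equivalent here, since the Class overload derives the logger name from the class's fully qualified name; a minimal illustration with a hypothetical class name:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    final class LoggerNamingSketch {
      // Both loggers carry the same name: the fully qualified class name.
      static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerNamingSketch.class);
      static final Logger BY_NAME = LoggerFactory.getLogger(LoggerNamingSketch.class.getName());
    }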

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 10951e9..99584d9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -39,8 +39,8 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.lang3.time.FastDateFormat;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.AutoCloseableLock;
@@ -60,7 +60,8 @@ import org.apache.hadoop.util.Time;
  */
 @InterfaceAudience.Private
 public class DirectoryScanner implements Runnable {
-  private static final Log LOG = LogFactory.getLog(DirectoryScanner.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DirectoryScanner.class);
   private static final int MILLIS_PER_SECOND = 1000;
   private static final String START_MESSAGE =
       "Periodic Directory Tree Verification scan"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java
index 83ee5f6..2da3b1e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hdfs.server.datanode;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -37,7 +37,8 @@ import java.util.concurrent.ThreadLocalRandom;
  */
 @InterfaceAudience.Private
 class ProfilingFileIoEvents {
-  static final Log LOG = LogFactory.getLog(ProfilingFileIoEvents.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(ProfilingFileIoEvents.class);
 
   private final boolean isEnabled;
   private final int sampleRangeMax;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java
index ea9e72c..3df83cf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java
@@ -32,8 +32,8 @@ import java.util.Set;
 
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.InvalidRequestException;
 import org.apache.hadoop.hdfs.ExtendedBlockId;
@@ -81,7 +81,8 @@ import com.google.common.collect.HashMultimap;
  * The counterpart of this class on the client is {@link DfsClientShmManager}.
  */
 public class ShortCircuitRegistry {
-  public static final Log LOG = LogFactory.getLog(ShortCircuitRegistry.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(ShortCircuitRegistry.class);
 
   private static final int SHM_LENGTH = 8192;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/AvailableSpaceVolumeChoosingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/AvailableSpaceVolumeChoosingPolicy.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/AvailableSpaceVolumeChoosingPolicy.java
index efe222f..67a66fd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/AvailableSpaceVolumeChoosingPolicy.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/AvailableSpaceVolumeChoosingPolicy.java
@@ -27,8 +27,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
@@ -46,7 +46,8 @@ import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
 public class AvailableSpaceVolumeChoosingPolicy<V extends FsVolumeSpi>
     implements VolumeChoosingPolicy<V>, Configurable {
   
-  private static final Log LOG = LogFactory.getLog(AvailableSpaceVolumeChoosingPolicy.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AvailableSpaceVolumeChoosingPolicy.class);
 
   private Object[] syncLocks;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/RoundRobinVolumeChoosingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/RoundRobinVolumeChoosingPolicy.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/RoundRobinVolumeChoosingPolicy.java
index b9bcf1f..2d924c0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/RoundRobinVolumeChoosingPolicy.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/RoundRobinVolumeChoosingPolicy.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
 
@@ -31,7 +31,8 @@ import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
  */
 public class RoundRobinVolumeChoosingPolicy<V extends FsVolumeSpi>
     implements VolumeChoosingPolicy<V> {
-  public static final Log LOG = LogFactory.getLog(RoundRobinVolumeChoosingPolicy.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(RoundRobinVolumeChoosingPolicy.class);
 
   // curVolumes stores the RR counters of each storage type.
   // The ordinal of storage type in org.apache.hadoop.fs.StorageType

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index 3f9de78..2adfb6b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -32,8 +32,8 @@ import java.util.Iterator;
 import java.util.Scanner;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CachingGetSpaceUsed;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -71,7 +71,7 @@ import com.google.common.annotations.VisibleForTesting;
  * This class is synchronized by {@link FsVolumeImpl}.
  */
 class BlockPoolSlice {
-  static final Log LOG = LogFactory.getLog(BlockPoolSlice.class);
+  static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class);
 
   private final String bpid;
   private final FsVolumeImpl volume; // volume to which this BlockPool belongs to
@@ -764,7 +764,7 @@ class BlockPoolSlice {
     }
 
     if (dfsUsage instanceof CachingGetSpaceUsed) {
-      IOUtils.cleanup(LOG, ((CachingGetSpaceUsed) dfsUsage));
+      IOUtils.cleanupWithLogger(LOG, ((CachingGetSpaceUsed) dfsUsage));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java
index 4929b5e..81213a0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java
@@ -30,8 +30,8 @@ import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DatanodeUtil;
@@ -59,7 +59,8 @@ import org.apache.hadoop.io.nativeio.NativeIOException;
  * They should be combined.
  */
 class FsDatasetAsyncDiskService {
-  public static final Log LOG = LogFactory.getLog(FsDatasetAsyncDiskService.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(FsDatasetAsyncDiskService.class);
   
   // ThreadPool core pool size
   private static final int CORE_THREADS_PER_VOLUME = 1;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskAsyncLazyPersistService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskAsyncLazyPersistService.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskAsyncLazyPersistService.java
index d6969c4..a77faf2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskAsyncLazyPersistService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskAsyncLazyPersistService.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -45,7 +45,8 @@ import java.util.concurrent.TimeUnit;
  * They should be combined.
  */
 class RamDiskAsyncLazyPersistService {
-  public static final Log LOG = LogFactory.getLog(RamDiskAsyncLazyPersistService.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(RamDiskAsyncLazyPersistService.class);
 
   // ThreadPool core pool size
   private static final int CORE_THREADS_PER_VOLUME = 1;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eca1a4bf/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java
index 335ed70..07e5201 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
 
 import com.google.common.base.Preconditions;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -36,7 +36,8 @@ import java.util.concurrent.atomic.AtomicLong;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public abstract class RamDiskReplicaTracker {
-  static final Log LOG = LogFactory.getLog(RamDiskReplicaTracker.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(RamDiskReplicaTracker.class);
 
   FsDatasetImpl fsDataset;
 

