[19/50] [abbrv] hadoop git commit: HDFS-7819. Log WARN message for the blocks which are not in Block ID based layout (Rakesh R via Colin P. McCabe)

2015-03-02 Thread zhz
HDFS-7819. Log WARN message for the blocks which are not in Block ID based 
layout (Rakesh R via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/da85e17c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/da85e17c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/da85e17c

Branch: refs/heads/HDFS-7285
Commit: da85e17c772dfd2348b8f0e93583f7e7ac4d4e16
Parents: db0b6e6
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Thu Feb 26 11:58:29 2015 -0800
Committer: Zhe Zhang zhezh...@cloudera.com
Committed: Mon Mar 2 09:13:52 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java  | 26 +---
 2 files changed, 25 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/da85e17c/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e09714f..54b4057 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -679,6 +679,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
 (vinayakumarb)
 
+HDFS-7819. Log WARN message for the blocks which are not in Block ID based
+layout (Rakesh R via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/da85e17c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 71f976b..09c2914 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -597,14 +597,15 @@ public class DirectoryScanner implements Runnable {
   for (String bpid : bpList) {
 LinkedList<ScanInfo> report = new LinkedList<ScanInfo>();
 File bpFinalizedDir = volume.getFinalizedDir(bpid);
-result.put(bpid, compileReport(volume, bpFinalizedDir, report));
+result.put(bpid,
+compileReport(volume, bpFinalizedDir, bpFinalizedDir, report));
   }
   return result;
 }
 
 /** Compile list {@link ScanInfo} for the blocks in the directory <dir> */
-private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol, File dir,
-LinkedList<ScanInfo> report) {
+private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol,
+File bpFinalizedDir, File dir, LinkedList<ScanInfo> report) {
   File[] files;
   try {
 files = FileUtil.listFiles(dir);
@@ -622,12 +623,14 @@ public class DirectoryScanner implements Runnable {
*/
for (int i = 0; i < files.length; i++) {
 if (files[i].isDirectory()) {
-  compileReport(vol, files[i], report);
+  compileReport(vol, bpFinalizedDir, files[i], report);
   continue;
 }
 if (!Block.isBlockFilename(files[i])) {
if (isBlockMetaFile("blk_", files[i].getName())) {
 long blockId = Block.getBlockId(files[i].getName());
+verifyFileLocation(files[i].getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, null, files[i], vol));
   }
   continue;
@@ -646,9 +649,24 @@ public class DirectoryScanner implements Runnable {
 break;
   }
 }
+verifyFileLocation(blockFile.getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, blockFile, metaFile, vol));
   }
   return report;
 }
+
+/**
+ * Verify whether the actual directory location of block file has the
+ * expected directory path computed using its block ID.
+ */
+private void verifyFileLocation(File actualBlockDir,
+File bpFinalizedDir, long blockId) {
+  File blockDir = DatanodeUtil.idToBlockDir(bpFinalizedDir, blockId);
+  if (actualBlockDir.compareTo(blockDir) != 0) {
+LOG.warn("Block: " + blockId
++ " has to be upgraded to block ID-based layout");
+  }
+}
   }
 }
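
The new verifyFileLocation() call compares the directory that actually holds a block file against the directory that DatanodeUtil.idToBlockDir() derives from the block ID, and logs a WARN when they differ; the replica is still added to the scan report either way, so the message only flags volumes that still carry a pre-upgrade directory structure. The plain-Java sketch below illustrates that comparison. The class name, the bit masks, and the "subdir" prefix are illustrative assumptions about the block-ID-based layout, and the paths in main() are hypothetical; none of this is taken from the patch itself.

import java.io.File;

/** Illustrative sketch: how a block ID could map to its expected directory. */
public class BlockIdLayoutSketch {

  // Assumption: two directory levels derived from bits of the block ID.
  static File idToBlockDir(File bpFinalizedDir, long blockId) {
    int d1 = (int) ((blockId >> 16) & 0xFF);  // first-level bucket
    int d2 = (int) ((blockId >> 8) & 0xFF);   // second-level bucket
    return new File(bpFinalizedDir,
        "subdir" + d1 + File.separator + "subdir" + d2);
  }

  public static void main(String[] args) {
    File finalized = new File("/data/dfs/dn/current/BP-1/current/finalized");
    long blockId = 1073741825L;

    File expected = idToBlockDir(finalized, blockId);
    // Hypothetical pre-upgrade location that does not follow the ID-based layout.
    File actual = new File(finalized, "subdir0/subdir0");

    // Same style of check the patch performs before logging the WARN.
    if (actual.compareTo(expected) != 0) {
      System.out.println("Block: " + blockId
          + " has to be upgraded to block ID-based layout");
    }
  }
}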



hadoop git commit: HDFS-7819. Log WARN message for the blocks which are not in Block ID based layout (Rakesh R via Colin P. McCabe)

2015-02-26 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk dce8b9c4d -> f0c980abe


HDFS-7819. Log WARN message for the blocks which are not in Block ID based 
layout (Rakesh R via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f0c980ab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f0c980ab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f0c980ab

Branch: refs/heads/trunk
Commit: f0c980abed3843923e0eb16b626fa27334195eda
Parents: dce8b9c
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Thu Feb 26 11:58:29 2015 -0800
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Thu Feb 26 11:58:29 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java  | 26 +---
 2 files changed, 25 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e09714f..54b4057 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -679,6 +679,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
 (vinayakumarb)
 
+HDFS-7819. Log WARN message for the blocks which are not in Block ID based
+layout (Rakesh R via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 71f976b..09c2914 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -597,14 +597,15 @@ public class DirectoryScanner implements Runnable {
   for (String bpid : bpList) {
 LinkedList<ScanInfo> report = new LinkedList<ScanInfo>();
 File bpFinalizedDir = volume.getFinalizedDir(bpid);
-result.put(bpid, compileReport(volume, bpFinalizedDir, report));
+result.put(bpid,
+compileReport(volume, bpFinalizedDir, bpFinalizedDir, report));
   }
   return result;
 }
 
 /** Compile list {@link ScanInfo} for the blocks in the directory <dir> */
-private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol, File dir,
-LinkedList<ScanInfo> report) {
+private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol,
+File bpFinalizedDir, File dir, LinkedList<ScanInfo> report) {
   File[] files;
   try {
 files = FileUtil.listFiles(dir);
@@ -622,12 +623,14 @@ public class DirectoryScanner implements Runnable {
*/
for (int i = 0; i < files.length; i++) {
 if (files[i].isDirectory()) {
-  compileReport(vol, files[i], report);
+  compileReport(vol, bpFinalizedDir, files[i], report);
   continue;
 }
 if (!Block.isBlockFilename(files[i])) {
if (isBlockMetaFile("blk_", files[i].getName())) {
 long blockId = Block.getBlockId(files[i].getName());
+verifyFileLocation(files[i].getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, null, files[i], vol));
   }
   continue;
@@ -646,9 +649,24 @@ public class DirectoryScanner implements Runnable {
 break;
   }
 }
+verifyFileLocation(blockFile.getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, blockFile, metaFile, vol));
   }
   return report;
 }
+
+/**
+ * Verify whether the actual directory location of block file has the
+ * expected directory path computed using its block ID.
+ */
+private void verifyFileLocation(File actualBlockDir,
+File bpFinalizedDir, long blockId) {
+  File blockDir = DatanodeUtil.idToBlockDir(bpFinalizedDir, blockId);
+  if (actualBlockDir.compareTo(blockDir) != 0) {
+LOG.warn("Block: " + blockId
++ " has to be upgraded to block ID-based layout");
+  }
+}
   }
 }



hadoop git commit: HDFS-7819. Log WARN message for the blocks which are not in Block ID based layout (Rakesh R via Colin P. McCabe)

2015-02-26 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 dc348f489 -> 8b3b9568b


HDFS-7819. Log WARN message for the blocks which are not in Block ID based 
layout (Rakesh R via Colin P. McCabe)

(cherry picked from commit f0c980abed3843923e0eb16b626fa27334195eda)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8b3b9568
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8b3b9568
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8b3b9568

Branch: refs/heads/branch-2
Commit: 8b3b9568b684820800f59eae3e48e0d058a8f21b
Parents: dc348f4
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Thu Feb 26 11:58:29 2015 -0800
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Thu Feb 26 12:03:11 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java  | 26 +---
 2 files changed, 25 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b3b9568/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f59bb71..79cf934 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -381,6 +381,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
 (vinayakumarb)
 
+HDFS-7819. Log WARN message for the blocks which are not in Block ID based
+layout (Rakesh R via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b3b9568/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 71f976b..09c2914 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -597,14 +597,15 @@ public class DirectoryScanner implements Runnable {
   for (String bpid : bpList) {
 LinkedList<ScanInfo> report = new LinkedList<ScanInfo>();
 File bpFinalizedDir = volume.getFinalizedDir(bpid);
-result.put(bpid, compileReport(volume, bpFinalizedDir, report));
+result.put(bpid,
+compileReport(volume, bpFinalizedDir, bpFinalizedDir, report));
   }
   return result;
 }
 
 /** Compile list {@link ScanInfo} for the blocks in the directory <dir> */
-private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol, File dir,
-LinkedList<ScanInfo> report) {
+private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol,
+File bpFinalizedDir, File dir, LinkedList<ScanInfo> report) {
   File[] files;
   try {
 files = FileUtil.listFiles(dir);
@@ -622,12 +623,14 @@ public class DirectoryScanner implements Runnable {
*/
for (int i = 0; i < files.length; i++) {
 if (files[i].isDirectory()) {
-  compileReport(vol, files[i], report);
+  compileReport(vol, bpFinalizedDir, files[i], report);
   continue;
 }
 if (!Block.isBlockFilename(files[i])) {
if (isBlockMetaFile("blk_", files[i].getName())) {
 long blockId = Block.getBlockId(files[i].getName());
+verifyFileLocation(files[i].getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, null, files[i], vol));
   }
   continue;
@@ -646,9 +649,24 @@ public class DirectoryScanner implements Runnable {
 break;
   }
 }
+verifyFileLocation(blockFile.getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, blockFile, metaFile, vol));
   }
   return report;
 }
+
+/**
+ * Verify whether the actual directory location of block file has the
+ * expected directory path computed using its block ID.
+ */
+private void verifyFileLocation(File actualBlockDir,
+File bpFinalizedDir, long blockId) {
+  File blockDir = DatanodeUtil.idToBlockDir(bpFinalizedDir, blockId);
+  if (actualBlockDir.compareTo(blockDir) != 0) {
+LOG.warn("Block: " + blockId
++ " has to be upgraded to block ID-based layout");
+  }
+}
  }
 }

[12/17] hadoop git commit: HDFS-7819. Log WARN message for the blocks which are not in Block ID based layout (Rakesh R via Colin P. McCabe)

2015-02-26 Thread zjshen
HDFS-7819. Log WARN message for the blocks which are not in Block ID based 
layout (Rakesh R via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f0c980ab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f0c980ab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f0c980ab

Branch: refs/heads/YARN-2928
Commit: f0c980abed3843923e0eb16b626fa27334195eda
Parents: dce8b9c
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Thu Feb 26 11:58:29 2015 -0800
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Thu Feb 26 11:58:29 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java  | 26 +---
 2 files changed, 25 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e09714f..54b4057 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -679,6 +679,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
 (vinayakumarb)
 
+HDFS-7819. Log WARN message for the blocks which are not in Block ID based
+layout (Rakesh R via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 71f976b..09c2914 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -597,14 +597,15 @@ public class DirectoryScanner implements Runnable {
   for (String bpid : bpList) {
 LinkedList<ScanInfo> report = new LinkedList<ScanInfo>();
 File bpFinalizedDir = volume.getFinalizedDir(bpid);
-result.put(bpid, compileReport(volume, bpFinalizedDir, report));
+result.put(bpid,
+compileReport(volume, bpFinalizedDir, bpFinalizedDir, report));
   }
   return result;
 }
 
 /** Compile list {@link ScanInfo} for the blocks in the directory <dir> */
-private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol, File dir,
-LinkedList<ScanInfo> report) {
+private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol,
+File bpFinalizedDir, File dir, LinkedList<ScanInfo> report) {
   File[] files;
   try {
 files = FileUtil.listFiles(dir);
@@ -622,12 +623,14 @@ public class DirectoryScanner implements Runnable {
*/
for (int i = 0; i < files.length; i++) {
 if (files[i].isDirectory()) {
-  compileReport(vol, files[i], report);
+  compileReport(vol, bpFinalizedDir, files[i], report);
   continue;
 }
 if (!Block.isBlockFilename(files[i])) {
if (isBlockMetaFile("blk_", files[i].getName())) {
 long blockId = Block.getBlockId(files[i].getName());
+verifyFileLocation(files[i].getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, null, files[i], vol));
   }
   continue;
@@ -646,9 +649,24 @@ public class DirectoryScanner implements Runnable {
 break;
   }
 }
+verifyFileLocation(blockFile.getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, blockFile, metaFile, vol));
   }
   return report;
 }
+
+/**
+ * Verify whether the actual directory location of block file has the
+ * expected directory path computed using its block ID.
+ */
+private void verifyFileLocation(File actualBlockDir,
+File bpFinalizedDir, long blockId) {
+  File blockDir = DatanodeUtil.idToBlockDir(bpFinalizedDir, blockId);
+  if (actualBlockDir.compareTo(blockDir) != 0) {
+LOG.warn("Block: " + blockId
++ " has to be upgraded to block ID-based layout");
+  }
+}
   }
 }