Author: jing9
Date: Thu Jan 30 19:22:29 2014
New Revision: 1562929

URL: http://svn.apache.org/r1562929
Log:
HDFS-5843. Merge change r1562927 from trunk.

Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1562929&r1=1562928&r2=1562929&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Thu Jan 30 19:22:29 2014
@@ -18,6 +18,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-5492. Port HDFS-2069 (Incorrect default trash interval in the
     docs) to trunk. (Akira Ajisaka via Arpit Agarwal)
 
+    HDFS-5843. DFSClient.getFileChecksum() throws IOException if checksum is 
+    disabled. (Laurent Goujon via jing9)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java?rev=1562929&r1=1562928&r2=1562929&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java Thu Jan 30 19:22:29 2014
@@ -655,8 +655,9 @@ class DataXceiver extends Receiver imple
      final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
       final DataChecksum checksum = header.getChecksum(); 
       final int bytesPerCRC = checksum.getBytesPerChecksum();
-      final long crcPerBlock = (metadataIn.getLength()
-          - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
+      final long crcPerBlock = checksum.getChecksumSize() > 0 
+              ? (metadataIn.getLength() - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize()
+              : 0;
       
       //compute block checksum
       final MD5Hash md5 = MD5Hash.digest(checksumIn);

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java?rev=1562929&r1=1562928&r2=1562929&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java Thu Jan 30 19:22:29 2014
@@ -71,7 +71,7 @@ public class TestFSOutputSummer {
     cleanupFile(name);
   }
   
-  /* create a file, write data with vairable amount of data */
+  /* create a file, write data with variable amount of data */
   private void writeFile3(Path name) throws Exception {
     FSDataOutputStream stm = fileSys.create(name, true, 
         fileSys.getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, 4096),
@@ -103,6 +103,8 @@ public class TestFSOutputSummer {
     stm.readFully(0, actual);
     checkAndEraseData(actual, 0, expected, "Read Sanity Test");
     stm.close();
+    // do a sanity check. Get the file checksum
+    fileSys.getFileChecksum(name);
   }
 
   private void cleanupFile(Path name) throws IOException {
@@ -112,13 +114,20 @@ public class TestFSOutputSummer {
   }
   
   /**
-   * Test write opeation for output stream in DFS.
+   * Test write operation for output stream in DFS.
    */
   @Test
   public void testFSOutputSummer() throws Exception {
+    doTestFSOutputSummer("CRC32");
+    doTestFSOutputSummer("CRC32C");
+    doTestFSOutputSummer("NULL");
+  }
+  
+  private void doTestFSOutputSummer(String checksumType) throws Exception {
     Configuration conf = new HdfsConfiguration();
     conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
     conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, BYTES_PER_CHECKSUM);
+    conf.set(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, checksumType);
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
                                                .numDataNodes(NUM_OF_DATANODES)
                                                .build();


Reply via email to