Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java Fri Apr 18 16:32:35 2014
@@ -58,7 +58,7 @@ public class TestSecureNameNode {
       Configuration conf = new HdfsConfiguration();
       conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
           "kerberos");
-      conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
+      conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
           "nn1/localh...@example.com");
       conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, nn1KeytabPath);
 

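For context, the hunk above swaps the deprecated DFS_NAMENODE_USER_NAME_KEY constant for DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY; the two constants point at the same underlying property. A minimal sketch of the secure-NameNode configuration pattern the test exercises (the principal and keytab values below are placeholders, not taken from the patch):

    Configuration conf = new HdfsConfiguration();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    // DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY resolves to "dfs.namenode.kerberos.principal"
    conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, "nn/_HOST@EXAMPLE.COM");
    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, "/etc/security/keytabs/nn.keytab");
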
Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java Fri Apr 18 16:32:35 2014
@@ -80,8 +80,8 @@ public class TestSecureNameNodeWithExter
       Configuration conf = new HdfsConfiguration();
       conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
           "kerberos");
-      conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, nnPrincipal);
-      conf.set(DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
+      conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, nnPrincipal);
+      conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
           nnSpnegoPrincipal);
       conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, nnKeyTab);
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java Fri Apr 18 16:32:35 2014
@@ -17,12 +17,10 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 import java.io.File;
 import java.io.IOException;
 import java.net.BindException;
+import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -55,16 +53,26 @@ public class TestValidateConfigurationSe
   public void testThatMatchingRPCandHttpPortsThrowException() 
       throws IOException {
 
-    Configuration conf = new HdfsConfiguration();
-    File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
-    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
-        nameDir.getAbsolutePath());
-
-    // set both of these to port 9000, should fail
-    FileSystem.setDefaultUri(conf, "hdfs://localhost:9000"); 
-    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:9000");
-    DFSTestUtil.formatNameNode(conf);
-    new NameNode(conf);
+    NameNode nameNode = null;
+    try {
+      Configuration conf = new HdfsConfiguration();
+      File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
+      conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+          nameDir.getAbsolutePath());
+
+      Random rand = new Random();
+      final int port = 30000 + rand.nextInt(30000);
+
+      // set both of these to the same port. It should fail.
+      FileSystem.setDefaultUri(conf, "hdfs://localhost:" + port);
+      conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:" + port);
+      DFSTestUtil.formatNameNode(conf);
+      nameNode = new NameNode(conf);
+    } finally {
+      if (nameNode != null) {
+        nameNode.stop();
+      }
+    }
   }
 
   /**
@@ -80,11 +88,29 @@ public class TestValidateConfigurationSe
     conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
         nameDir.getAbsolutePath());
 
-    FileSystem.setDefaultUri(conf, "hdfs://localhost:8000");
-    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:9000");
-    DFSTestUtil.formatNameNode(conf);
-    NameNode nameNode = new NameNode(conf); // should be OK!
-    nameNode.stop();
+    Random rand = new Random();
+
+    // A few retries in case the ports we choose are in use.
+    for (int i = 0; i < 5; ++i) {
+      final int port1 = 30000 + rand.nextInt(10000);
+      final int port2 = port1 + 1 + rand.nextInt(10000);
+
+      FileSystem.setDefaultUri(conf, "hdfs://localhost:" + port1);
+      conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:" + port2);
+      DFSTestUtil.formatNameNode(conf);
+      NameNode nameNode = null;
+
+      try {
+        nameNode = new NameNode(conf); // should be OK!
+        break;
+      } catch(BindException be) {
+        continue;     // Port in use? Try another.
+      } finally {
+        if (nameNode != null) {
+          nameNode.stop();
+        }
+      }
+    }
   }
 
   /**

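The retry loop above tolerates collisions from randomly chosen ports. As an aside, another common way to pick a test port is to ask the OS for a free one up front; a sketch (not what this patch does, and still subject to a small race between closing the probe socket and the NameNode binding):

    // import java.net.ServerSocket;
    try (ServerSocket probe = new ServerSocket(0)) {
      int freePort = probe.getLocalPort();   // OS-assigned free port
      FileSystem.setDefaultUri(conf, "hdfs://localhost:" + freePort);
    }
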
Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java Fri Apr 18 16:32:35 2014
@@ -99,6 +99,9 @@ public class TestDelegationTokensWithHA 
       .build();
     cluster.waitActive();
     
+    String logicalName = HATestUtil.getLogicalHostname(cluster);
+    HATestUtil.setFailoverConfigurations(cluster, conf, logicalName, 0);
+
     nn0 = cluster.getNameNode(0);
     nn1 = cluster.getNameNode(1);
     fs = HATestUtil.configureFailoverFs(cluster, conf);
@@ -246,8 +249,7 @@ public class TestDelegationTokensWithHA 
     doRenewOrCancel(token, clientConf, TokenTestAction.RENEW);
     doRenewOrCancel(token, clientConf, TokenTestAction.CANCEL);
   }
-  
-  @SuppressWarnings("deprecation")
+
   @Test
   public void testDelegationTokenWithDoAs() throws Exception {
     final Token<DelegationTokenIdentifier> token =
@@ -259,29 +261,22 @@ public class TestDelegationTokensWithHA 
     longUgi.doAs(new PrivilegedExceptionAction<Void>() {
       @Override
       public Void run() throws Exception {
-        DistributedFileSystem dfs = (DistributedFileSystem)
-            HATestUtil.configureFailoverFs(cluster, conf);
         // try renew with long name
-        dfs.renewDelegationToken(token);
+        token.renew(conf);
         return null;
       }
     });
     shortUgi.doAs(new PrivilegedExceptionAction<Void>() {
       @Override
       public Void run() throws Exception {
-        DistributedFileSystem dfs = (DistributedFileSystem)
-            HATestUtil.configureFailoverFs(cluster, conf);
-        dfs.renewDelegationToken(token);
+        token.renew(conf);
         return null;
       }
     });
     longUgi.doAs(new PrivilegedExceptionAction<Void>() {
       @Override
       public Void run() throws Exception {
-        DistributedFileSystem dfs = (DistributedFileSystem)
-            HATestUtil.configureFailoverFs(cluster, conf);
-        // try cancel with long name
-        dfs.cancelDelegationToken(token);
+        token.cancel(conf);
         return null;
       }
     });

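The rewrite above replaces the deprecated DistributedFileSystem renew/cancel calls with the generic Token API, which dispatches to the matching TokenRenewer based on the token's kind. A minimal sketch of that API, assuming conf already carries the failover settings added in the setup change (both calls can throw IOException or InterruptedException):

    // org.apache.hadoop.security.token.Token
    long newExpiry = token.renew(conf);  // renews via the registered TokenRenewer
    token.cancel(conf);                  // invalidates the token
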
Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyCheckpoints.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyCheckpoints.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyCheckpoints.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyCheckpoints.java Fri Apr 18 16:32:35 2014
@@ -25,6 +25,9 @@ import static org.junit.Assert.fail;
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.lang.management.ManagementFactory;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
 import java.net.URI;
 import java.net.URL;
 import java.util.List;
@@ -59,6 +62,7 @@ import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 
+import com.google.common.base.Supplier;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Lists;
@@ -270,6 +274,29 @@ public class TestStandbyCheckpoints {
     HATestUtil.waitForCheckpoint(cluster, 1, ImmutableList.of(104));
     cluster.transitionToStandby(0);
     cluster.transitionToActive(1);
+
+    // Wait to make sure background TransferFsImageUpload thread was cancelled.
+    // This needs to be done before the next test in the suite starts, so that a
+    // file descriptor is not held open during the next cluster init.
+    cluster.shutdown();
+    cluster = null;
+    GenericTestUtils.waitFor(new Supplier<Boolean>() {
+      @Override
+      public Boolean get() {
+        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
+        ThreadInfo[] threads = threadBean.getThreadInfo(
+          threadBean.getAllThreadIds(), 1);
+        for (ThreadInfo thread: threads) {
+          if (thread.getThreadName().startsWith("TransferFsImageUpload")) {
+            return false;
+          }
+        }
+        return true;
+      }
+    }, 1000, 30000);
+
+    // Assert that former active did not accept the canceled checkpoint file.
+    assertEquals(0, nn0.getFSImage().getMostRecentCheckpointTxId());
   }
   
   /**

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java Fri Apr 18 16:32:35 2014
@@ -243,7 +243,7 @@ public class TestSnapshot {
   }
   
   /**
-   * Test if the OfflineImageViewer can correctly parse a fsimage containing
+   * Test if the OfflineImageViewerPB can correctly parse a fsimage containing
    * snapshots
    */
   @Test

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotStatsMXBean.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotStatsMXBean.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotStatsMXBean.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotStatsMXBean.java Fri Apr 18 16:32:35 2014
@@ -59,17 +59,20 @@ public class TestSnapshotStatsMXBean {
       ObjectName mxbeanName = new ObjectName(
           "Hadoop:service=NameNode,name=SnapshotInfo");
 
-      CompositeData statsbean =
-          (CompositeData) mbs.getAttribute(mxbeanName, "SnapshotStats");
-      int numDirectories = Array.getLength(statsbean.get("directory"));
+      CompositeData[] directories =
+          (CompositeData[]) mbs.getAttribute(
+              mxbeanName, "SnapshottableDirectories");
+      int numDirectories = Array.getLength(directories);
       assertEquals(sm.getNumSnapshottableDirs(), numDirectories);
-      int numSnapshots = Array.getLength(statsbean.get("snapshots"));
+      CompositeData[] snapshots =
+          (CompositeData[]) mbs.getAttribute(mxbeanName, "Snapshots");
+      int numSnapshots = Array.getLength(snapshots);
       assertEquals(sm.getNumSnapshots(), numSnapshots);
 
-      CompositeData directory = (CompositeData) Array.get(statsbean.get("directory"), 0);
-      CompositeData snapshots = (CompositeData) Array.get(statsbean.get("snapshots"), 0);
-      assertTrue(((String) directory.get("path")).contains(pathName));
-      assertTrue(((String) snapshots.get("snapshotDirectory")).contains(pathName));
+      CompositeData d = (CompositeData) Array.get(directories, 0);
+      CompositeData s = (CompositeData) Array.get(snapshots, 0);
+      assertTrue(((String) d.get("path")).contains(pathName));
+      assertTrue(((String) s.get("snapshotDirectory")).contains(pathName));
     } finally {
       if (cluster != null) {
         cluster.shutdown();

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TestOfflineEditsViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TestOfflineEditsViewer.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TestOfflineEditsViewer.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TestOfflineEditsViewer.java Fri Apr 18 16:32:35 2014
@@ -21,11 +21,13 @@ package org.apache.hadoop.hdfs.tools.off
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.DFSTestUtil;
@@ -166,7 +168,8 @@ public class TestOfflineEditsViewer {
     assertTrue("Edits " + editsStored + " should have all op codes",
         hasAllOpCodes(editsStored));
     assertTrue("Reference XML edits and parsed to XML should be same",
-        filesEqual(editsStoredXml, editsStoredParsedXml));
+        FileUtils.contentEqualsIgnoreEOL(new File(editsStoredXml),
+          new File(editsStoredParsedXml), "UTF-8"));
     assertTrue(
         "Reference edits and reparsed (bin to XML to bin) should be same",
         filesEqualIgnoreTrailingZeros(editsStored, editsStoredReparsed));
@@ -270,26 +273,4 @@ public class TestOfflineEditsViewer {
 
     return true;
   }
-
-  /**
-   * Compare two files, throw exception is they are not same
-   *
-   * @param filename1 first file to compare
-   * @param filename2 second file to compare
-   */
-  private boolean filesEqual(String filename1,
-    String filename2) throws IOException {
-
-    // make file 1 the small one
-    ByteBuffer bb1 = ByteBuffer.wrap(DFSTestUtil.loadFile(filename1));
-    ByteBuffer bb2 = ByteBuffer.wrap(DFSTestUtil.loadFile(filename2));
-
-    // compare from 0 to capacity
-    bb1.position(0);
-    bb1.limit(bb1.capacity());
-    bb2.position(0);
-    bb2.limit(bb2.capacity());
-
-    return bb1.equals(bb2);
-  }
 }

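The removed filesEqual() helper did a strict byte comparison; the commons-io replacement also tolerates platform line-ending differences, which matters for the XML round-trip check. A sketch of the call, assuming commons-io 2.2+ (where contentEqualsIgnoreEOL was introduced) is on the test classpath; the file names are placeholders:

    // org.apache.commons.io.FileUtils
    boolean same = FileUtils.contentEqualsIgnoreEOL(
        new File("editsStored.xml"), new File("editsStoredParsed.xml"), "UTF-8");
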
Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java Fri Apr 18 16:32:35 2014
@@ -28,6 +28,10 @@ import java.io.PrintWriter;
 import java.io.RandomAccessFile;
 import java.io.StringReader;
 import java.io.StringWriter;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
@@ -51,7 +55,9 @@ import org.apache.hadoop.hdfs.Distribute
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -64,31 +70,13 @@ import org.xml.sax.helpers.DefaultHandle
 
 import com.google.common.collect.Maps;
 
-/**
- * Test function of OfflineImageViewer by: * confirming it can correctly process
- * a valid fsimage file and that the processing generates a correct
- * representation of the namespace * confirming it correctly fails to process an
- * fsimage file with a layout version it shouldn't be able to handle * confirm
- * it correctly bails on malformed image files, in particular, a file that ends
- * suddenly.
- */
 public class TestOfflineImageViewer {
-  private static final Log LOG = LogFactory.getLog(OfflineImageViewer.class);
+  private static final Log LOG = LogFactory.getLog(OfflineImageViewerPB.class);
   private static final int NUM_DIRS = 3;
   private static final int FILES_PER_DIR = 4;
   private static final String TEST_RENEWER = "JobTracker";
   private static File originalFsimage = null;
 
-  // Elements of lines of ls-file output to be compared to FileStatus instance
-  private static final class LsElements {
-    private String perms;
-    private int replication;
-    private String username;
-    private String groupname;
-    private long filesize;
-    private boolean isDir;
-  }
-
   // namespace as written to dfs, to be compared with viewer's output
   final static HashMap<String, FileStatus> writtenFiles = Maps.newHashMap();
 
@@ -176,37 +164,6 @@ public class TestOfflineImageViewer {
     return hdfs.getFileStatus(new Path(file));
   }
 
-  // Verify that we can correctly generate an ls-style output for a valid
-  // fsimage
-  @Test
-  public void outputOfLSVisitor() throws IOException {
-    StringWriter output = new StringWriter();
-    PrintWriter out = new PrintWriter(output);
-    LsrPBImage v = new LsrPBImage(new Configuration(), out);
-    v.visit(new RandomAccessFile(originalFsimage, "r"));
-    out.close();
-    Pattern pattern = Pattern
-        .compile("([d\\-])([rwx\\-]{9})\\s*(-|\\d+)\\s*(\\w+)\\s*(\\w+)\\s*(\\d+)\\s*(\\d+)\\s*([\b/]+)");
-    int count = 0;
-    for (String s : output.toString().split("\n")) {
-      Matcher m = pattern.matcher(s);
-      assertTrue(m.find());
-      LsElements e = new LsElements();
-      e.isDir = m.group(1).equals("d");
-      e.perms = m.group(2);
-      e.replication = m.group(3).equals("-") ? 0 : Integer.parseInt(m.group(3));
-      e.username = m.group(4);
-      e.groupname = m.group(5);
-      e.filesize = Long.parseLong(m.group(7));
-      String path = m.group(8);
-      if (!path.equals("/")) {
-        compareFiles(writtenFiles.get(path), e);
-      }
-      ++count;
-    }
-    assertEquals(writtenFiles.size() + 1, count);
-  }
-
   @Test(expected = IOException.class)
   public void testTruncatedFSImage() throws IOException {
     File truncatedFile = folder.newFile();
@@ -216,18 +173,6 @@ public class TestOfflineImageViewer {
         output)).visit(new RandomAccessFile(truncatedFile, "r"));
   }
 
-  // Compare two files as listed in the original namespace FileStatus and
-  // the output of the ls file from the image processor
-  private void compareFiles(FileStatus fs, LsElements elements) {
-    assertEquals("directory listed as such", fs.isDirectory(), elements.isDir);
-    assertEquals("perms string equal", fs.getPermission().toString(),
-        elements.perms);
-    assertEquals("replication equal", fs.getReplication(), 
elements.replication);
-    assertEquals("owner equal", fs.getOwner(), elements.username);
-    assertEquals("group equal", fs.getGroup(), elements.groupname);
-    assertEquals("lengths equal", fs.getLen(), elements.filesize);
-  }
-
   private void copyPartOfFile(File src, File dest) throws IOException {
     FileInputStream in = null;
     FileOutputStream out = null;
@@ -297,4 +242,87 @@ public class TestOfflineImageViewer {
     final String xml = output.getBuffer().toString();
     parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());
   }
+
+  @Test
+  public void testWebImageViewer() throws IOException, InterruptedException,
+      URISyntaxException {
+    WebImageViewer viewer = new WebImageViewer(
+        NetUtils.createSocketAddr("localhost:0"));
+    try {
+      viewer.initServer(originalFsimage.getAbsolutePath());
+      int port = viewer.getPort();
+
+      // create a WebHdfsFileSystem instance
+      URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
+      Configuration conf = new Configuration();
+      WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.get(uri, conf);
+
+      // verify the number of directories
+      FileStatus[] statuses = webhdfs.listStatus(new Path("/"));
+      assertEquals(NUM_DIRS, statuses.length);
+
+      // verify the number of files in the directory
+      statuses = webhdfs.listStatus(new Path("/dir0"));
+      assertEquals(FILES_PER_DIR, statuses.length);
+
+      // compare a file
+      FileStatus status = webhdfs.listStatus(new Path("/dir0/file0"))[0];
+      FileStatus expected = writtenFiles.get("/dir0/file0");
+      compareFile(expected, status);
+
+      // LISTSTATUS operation to an invalid path
+      URL url = new URL("http://localhost:" + port +
+                    "/webhdfs/v1/invalid/?op=LISTSTATUS");
+      verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
+
+      // LISTSTATUS operation to an invalid prefix
+      url = new URL("http://localhost:" + port + "/webhdfs/v1?op=LISTSTATUS");
+      verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
+
+      // GETFILESTATUS operation
+      status = webhdfs.getFileStatus(new Path("/dir0/file0"));
+      compareFile(expected, status);
+
+      // GETFILESTATUS operation to an invalid path
+      url = new URL("http://localhost:" + port +
+                    "/webhdfs/v1/invalid/?op=GETFILESTATUS");
+      verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
+
+      // invalid operation
+      url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=INVALID");
+      verifyHttpResponseCode(HttpURLConnection.HTTP_BAD_REQUEST, url);
+
+      // invalid method
+      url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=LISTSTATUS");
+      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("POST");
+      connection.connect();
+      assertEquals(HttpURLConnection.HTTP_BAD_METHOD,
+          connection.getResponseCode());
+    } finally {
+      // shutdown the viewer
+      viewer.shutdown();
+    }
+  }
+
+  private static void compareFile(FileStatus expected, FileStatus status) {
+    assertEquals(expected.getAccessTime(), status.getAccessTime());
+    assertEquals(expected.getBlockSize(), status.getBlockSize());
+    assertEquals(expected.getGroup(), status.getGroup());
+    assertEquals(expected.getLen(), status.getLen());
+    assertEquals(expected.getModificationTime(),
+        status.getModificationTime());
+    assertEquals(expected.getOwner(), status.getOwner());
+    assertEquals(expected.getPermission(), status.getPermission());
+    assertEquals(expected.getReplication(), status.getReplication());
+    assertEquals(expected.isDirectory(), status.isDirectory());
+  }
+
+  private void verifyHttpResponseCode(int expectedCode, URL url)
+      throws IOException {
+    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+    connection.setRequestMethod("GET");
+    connection.connect();
+    assertEquals(expectedCode, connection.getResponseCode());
+  }
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java Fri Apr 18 16:32:35 2014
@@ -52,7 +52,7 @@ public class TestFSMainOperationsWebHdfs
   private static FileSystem fileSystem;
   
   public TestFSMainOperationsWebHdfs() {
-    super();
+    super("/tmp/TestFSMainOperationsWebHdfs");
   }
 
   @Override

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java Fri Apr 18 16:32:35 2014
@@ -83,16 +83,6 @@ public class TestHttpsFileSystem {
   }
 
   @Test
-  public void testHsftpFileSystem() throws Exception {
-    FileSystem fs = FileSystem.get(new URI("hsftp://" + nnAddr), conf);
-    Assert.assertTrue(fs.exists(new Path("/test")));
-    InputStream is = fs.open(new Path("/test"));
-    Assert.assertEquals(23, is.read());
-    is.close();
-    fs.close();
-  }
-
-  @Test
   public void testSWebHdfsFileSystem() throws Exception {
     FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, "swebhdfs");
     final Path f = new Path("/testswebhdfs");

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Fri Apr 18 16:32:35 2014
@@ -182,9 +182,8 @@ public class TestWebHdfsFileSystemContra
     final Path p = new Path("/test/testOpenNonExistFile");
     //open it as a file, should get FileNotFoundException 
     try {
-      final FSDataInputStream in = fs.open(p);
-      in.read();
-      fail();
+      fs.open(p);
+      fail("Expected FileNotFoundException was not thrown");
     } catch(FileNotFoundException fnfe) {
       WebHdfsFileSystem.LOG.info("This is expected.", fnfe);
     }
@@ -410,7 +409,7 @@ public class TestWebHdfsFileSystemContra
           new DoAsParam(ugi.getShortUserName() + "proxy"));
       final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.connect();
-      assertEquals(HttpServletResponse.SC_UNAUTHORIZED, conn.getResponseCode());
+      assertEquals(HttpServletResponse.SC_FORBIDDEN, conn.getResponseCode());
       conn.disconnect();
     }
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java Fri Apr 18 16:32:35 2014
@@ -179,14 +179,14 @@ public class TestRefreshUserMappings {
     
     // check before
     try {
-      ProxyUsers.authorize(ugi1, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi1, "127.0.0.1");
       fail("first auth for " + ugi1.getShortUserName() + " should've failed ");
     } catch (AuthorizationException e) {
       // expected
       System.err.println("auth for " + ugi1.getUserName() + " failed");
     }
     try {
-      ProxyUsers.authorize(ugi2, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi2, "127.0.0.1");
       System.err.println("auth for " + ugi2.getUserName() + " succeeded");
       // expected
     } catch (AuthorizationException e) {
@@ -204,14 +204,14 @@ public class TestRefreshUserMappings {
     admin.run(args);
     
     try {
-      ProxyUsers.authorize(ugi2, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi2, "127.0.0.1");
       fail("second auth for " + ugi2.getShortUserName() + " should've failed 
");
     } catch (AuthorizationException e) {
       // expected
       System.err.println("auth for " + ugi2.getUserName() + " failed");
     }
     try {
-      ProxyUsers.authorize(ugi1, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi1, "127.0.0.1");
       System.err.println("auth for " + ugi1.getUserName() + " succeeded");
       // expected
     } catch (AuthorizationException e) {

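The two-argument ProxyUsers.authorize() consults proxy-user rules cached from the most recent refresh instead of taking a Configuration on every call. A sketch of the expected call sequence (this test triggers the refresh through DFSAdmin elsewhere; the direct refresh call below is shown only for illustration):

    // Load (or reload) proxy-user rules, then authorize against the cache.
    ProxyUsers.refreshSuperUserGroupsConfiguration(config);
    ProxyUsers.authorize(ugi1, "127.0.0.1");  // throws AuthorizationException on failure
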
Modified: hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/FakeRenewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/FakeRenewer.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/FakeRenewer.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/FakeRenewer.java Fri Apr 18 16:32:35 2014
@@ -27,7 +27,7 @@ import org.apache.hadoop.security.token.
 public class FakeRenewer extends TokenRenewer {
   static Token<?> lastRenewed = null;
   static Token<?> lastCanceled = null;
-  static final Text KIND = new Text("TESTING-TOKEN-KIND");
+  public static final Text KIND = new Text("TESTING-TOKEN-KIND");
 
   @Override
   public boolean handleKind(Text kind) {
@@ -54,4 +54,12 @@ public class FakeRenewer extends TokenRe
     lastRenewed = null;
     lastCanceled = null;
   }
+
+  public static Token<?> getLastRenewed() {
+    return lastRenewed;
+  }
+
+  public static Token<?> getLastCanceled() {
+    return lastCanceled;
+  }
 }
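
For context, TokenRenewer implementations such as FakeRenewer are discovered through Java's ServiceLoader, so making KIND public does not by itself register the class; a sketch of the usual registration, assuming the standard services-file location:

    # src/test/resources/META-INF/services/org.apache.hadoop.security.token.TokenRenewer
    org.apache.hadoop.tools.FakeRenewer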

