Author: wheat9
Date: Thu Mar 27 18:17:21 2014
New Revision: 1582434

URL: http://svn.apache.org/r1582434
Log:
HDFS-5978. Merge r1582433 from trunk.

Added:
    
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
      - copied unchanged from r1582433, 
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
    
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
      - copied unchanged from r1582433, 
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
    
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/WebImageViewer.java
      - copied unchanged from r1582433, 
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/WebImageViewer.java
Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/pom.xml
    
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
    
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

Modified: 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1582434&r1=1582433&r2=1582434&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
(original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
Thu Mar 27 18:17:21 2014
@@ -21,7 +21,10 @@ Release 2.5.0 - UNRELEASED
 
     HDFS-6119. FSNamesystem code cleanup. (suresh)
 
-    HDFS-6158. Clean up dead code for OfflineImageViewer (wheat9)
+    HDFS-6158. Clean up dead code for OfflineImageViewer. (wheat9)
+
+    HDFS-5978. Create a tool to take fsimage and expose read-only WebHDFS API.
+    (Akira Ajisaka via wheat9)
 
   OPTIMIZATIONS
 

Modified: 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/pom.xml?rev=1582434&r1=1582433&r2=1582434&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/pom.xml 
(original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/pom.xml Thu 
Mar 27 18:17:21 2014
@@ -178,7 +178,7 @@ http://maven.apache.org/xsd/maven-4.0.0.
     <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty</artifactId>
-      <scope>test</scope>
+      <scope>compile</scope>
     </dependency>
   </dependencies>
 

Modified: 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java?rev=1582434&r1=1582433&r2=1582434&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
 Thu Mar 27 18:17:21 2014
@@ -34,6 +34,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
 
 /**
  * OfflineImageViewerPB to dump the contents of an Hadoop image file to XML or
@@ -69,6 +70,8 @@ public class OfflineImageViewerPB {
       + "    -maxSize specifies the range [0, maxSize] of file sizes to be\n"
       + "     analyzed (128GB by default).\n"
      + "    -step defines the granularity of the distribution. (2MB by default)\n"
+      + "  * Web: Run a viewer to expose read-only WebHDFS API.\n"
+      + "    -addr specifies the address to listen. (localhost:5978 by default)\n"
       + "\n"
       + "Required command line arguments:\n"
       + "-i,--inputFile <arg>   FSImage file to process.\n"
@@ -103,6 +106,7 @@ public class OfflineImageViewerPB {
     options.addOption("h", "help", false, "");
     options.addOption("maxSize", true, "");
     options.addOption("step", true, "");
+    options.addOption("addr", true, "");
 
     return options;
   }
@@ -161,6 +165,10 @@ public class OfflineImageViewerPB {
       } else if (processor.equals("XML")) {
         new PBImageXmlWriter(conf, out).visit(new RandomAccessFile(inputFile,
             "r"));
+      } else if (processor.equals("Web")) {
+        String addr = cmd.getOptionValue("addr", "localhost:5978");
+        new WebImageViewer(NetUtils.createSocketAddr(addr))
+            .initServerAndWait(inputFile);
       } else {
         new LsrPBImage(conf, out).visit(new RandomAccessFile(inputFile, "r"));
       }

Modified: 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java?rev=1582434&r1=1582433&r2=1582434&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
 Thu Mar 27 18:17:21 2014
@@ -28,9 +28,13 @@ import java.io.PrintWriter;
 import java.io.RandomAccessFile;
 import java.io.StringReader;
 import java.io.StringWriter;
+import java.net.HttpURLConnection;
+import java.net.URL;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -52,8 +56,12 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
@@ -289,4 +297,66 @@ public class TestOfflineImageViewer {
     final String xml = output.getBuffer().toString();
     parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());
   }
+
+  @Test
+  public void testWebImageViewer() throws IOException, InterruptedException {
+    WebImageViewer viewer = new WebImageViewer(
+        NetUtils.createSocketAddr("localhost:0"));
+    try {
+      viewer.initServer(originalFsimage.getAbsolutePath());
+      int port = viewer.getPort();
+
+      // 1. LISTSTATUS operation to a valid path
+      URL url = new URL("http://localhost:" + port + "/?op=LISTSTATUS");
+      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("GET");
+      connection.connect();
+      assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
+      assertEquals("application/json", connection.getContentType());
+
+      String content = org.apache.commons.io.IOUtils.toString(
+          connection.getInputStream());
+      LOG.info("content: " + content);
+
+      // verify the number of directories listed
+      ObjectMapper mapper = new ObjectMapper();
+      Map<String, Map<String, List<Map<String, Object>>>> fileStatuses =
+          mapper.readValue(content, new TypeReference
+          <Map<String, Map<String, List<Map<String, Object>>>>>(){});
+      List<Map<String, Object>> fileStatusList = fileStatuses
+          .get("FileStatuses").get("FileStatus");
+      assertEquals(NUM_DIRS, fileStatusList.size());
+
+      // verify the number of files in a directory
+      Map<String, Object> fileStatusMap = fileStatusList.get(0);
+      assertEquals(FILES_PER_DIR, fileStatusMap.get("childrenNum"));
+
+      // 2. LISTSTATUS operation to a invalid path
+      url = new URL("http://localhost:" + port + "/invalid/?op=LISTSTATUS");
+      connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("GET");
+      connection.connect();
+      assertEquals(HttpURLConnection.HTTP_NOT_FOUND,
+                   connection.getResponseCode());
+
+      // 3. invalid operation
+      url = new URL("http://localhost:" + port + "/?op=INVALID");
+      connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("GET");
+      connection.connect();
+      assertEquals(HttpURLConnection.HTTP_BAD_REQUEST,
+          connection.getResponseCode());
+
+      // 4. invalid method
+      url = new URL("http://localhost:" + port + "/?op=LISTSTATUS");
+      connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("POST");
+      connection.connect();
+      assertEquals(HttpURLConnection.HTTP_BAD_METHOD,
+          connection.getResponseCode());
+    } finally {
+      // shutdown the viewer
+      viewer.shutdown();
+    }
+  }
 }


Reply via email to