Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 55c604517 -> 1df23089e (forced update)


HADOOP-13068. Clean up RunJar and related test class. (Contributed by Andras Bokor)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f343d91e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f343d91e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f343d91e

Branch: refs/heads/HADOOP-12930
Commit: f343d91ecc0d1c6d9dc9810faf68ec04f7b07c2f
Parents: 36972d6
Author: Arpit Agarwal <a...@apache.org>
Authored: Wed May 4 09:49:33 2016 -0700
Committer: Arpit Agarwal <a...@apache.org>
Committed: Wed May 4 09:49:33 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/util/RunJar.java     | 89 +++++++++++---------
 .../java/org/apache/hadoop/util/TestRunJar.java | 62 +++++++-------
 2 files changed, 80 insertions(+), 71 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f343d91e/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
index 52cf05c..19b51ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
@@ -23,7 +23,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.lang.reflect.Array;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.MalformedURLException;
@@ -40,7 +39,6 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.slf4j.Logger;
@@ -53,7 +51,7 @@ public class RunJar {
 
   private static final Logger LOG = LoggerFactory.getLogger(RunJar.class);
 
-  /** Pattern that matches any string */
+  /** Pattern that matches any string. */
   public static final Pattern MATCH_ANY = Pattern.compile(".*");
 
   /**
@@ -77,9 +75,20 @@ public class RunJar {
       "HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES";
 
   /**
+   * Buffer size for copy the content of compressed file to new file.
+   */
+  private static final int BUFFER_SIZE = 8_192;
+
+  /**
    * Unpack a jar file into a directory.
    *
    * This version unpacks all files inside the jar regardless of filename.
+   *
+   * @param jarFile the .jar file to unpack
+   * @param toDir the destination directory into which to unpack the jar
+   *
+   * @throws IOException if an I/O error has occurred or toDir
+   * cannot be created and does not already exist
    */
   public static void unJar(File jarFile, File toDir) throws IOException {
     unJar(jarFile, toDir, MATCH_ANY);
@@ -92,47 +101,43 @@ public class RunJar {
    * @param jarFile the .jar file to unpack
    * @param toDir the destination directory into which to unpack the jar
    * @param unpackRegex the pattern to match jar entries against
+   *
+   * @throws IOException if an I/O error has occurred or toDir
+   * cannot be created and does not already exist
    */
   public static void unJar(File jarFile, File toDir, Pattern unpackRegex)
-    throws IOException {
-    JarFile jar = new JarFile(jarFile);
-    try {
+      throws IOException {
+    try (JarFile jar = new JarFile(jarFile)) {
       int numOfFailedLastModifiedSet = 0;
       Enumeration<JarEntry> entries = jar.entries();
       while (entries.hasMoreElements()) {
         final JarEntry entry = entries.nextElement();
         if (!entry.isDirectory() &&
             unpackRegex.matcher(entry.getName()).matches()) {
-          InputStream in = jar.getInputStream(entry);
-          try {
+          try (InputStream in = jar.getInputStream(entry)) {
             File file = new File(toDir, entry.getName());
             ensureDirectory(file.getParentFile());
-            OutputStream out = new FileOutputStream(file);
-            try {
-              IOUtils.copyBytes(in, out, 8192);
-            } finally {
-              out.close();
+            try (OutputStream out = new FileOutputStream(file)) {
+              IOUtils.copyBytes(in, out, BUFFER_SIZE);
             }
             if (!file.setLastModified(entry.getTime())) {
               numOfFailedLastModifiedSet++;
             }
-          } finally {
-            in.close();
           }
         }
       }
       if (numOfFailedLastModifiedSet > 0) {
         LOG.warn("Could not set last modfied time for {} file(s)",
-                numOfFailedLastModifiedSet);
+            numOfFailedLastModifiedSet);
       }
-    } finally {
-      jar.close();
     }
   }
 
   /**
    * Ensure the existence of a given directory.
    *
+   * @param dir Directory to check
+   *
    * @throws IOException if it cannot be created and does not already exist
    */
   private static void ensureDirectory(File dir) throws IOException {
@@ -169,7 +174,7 @@ public class RunJar {
     JarFile jarFile;
     try {
       jarFile = new JarFile(fileName);
-    } catch(IOException io) {
+    } catch (IOException io) {
       throw new IOException("Error opening job jar: " + fileName)
         .initCause(io);
     }
@@ -193,11 +198,11 @@ public class RunJar {
     ensureDirectory(tmpDir);
 
     final File workDir;
-    try { 
+    try {
       workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
     } catch (IOException ioe) {
-      // If user has insufficient perms to write to tmpDir, default  
-      // "Permission denied" message doesn't specify a filename. 
+      // If user has insufficient perms to write to tmpDir, default
+      // "Permission denied" message doesn't specify a filename.
       System.err.println("Error creating temp dir in java.io.tmpdir "
                          + tmpDir + " due to " + ioe.getMessage());
       System.exit(-1);
@@ -211,12 +216,12 @@ public class RunJar {
     ensureDirectory(workDir);
 
     ShutdownHookManager.get().addShutdownHook(
-      new Runnable() {
-        @Override
-        public void run() {
-          FileUtil.fullyDelete(workDir);
-        }
-      }, SHUTDOWN_HOOK_PRIORITY);
+        new Runnable() {
+          @Override
+          public void run() {
+            FileUtil.fullyDelete(workDir);
+          }
+        }, SHUTDOWN_HOOK_PRIORITY);
 
 
     unJar(file, workDir);
@@ -225,13 +230,13 @@ public class RunJar {
 
     Thread.currentThread().setContextClassLoader(loader);
     Class<?> mainClass = Class.forName(mainClassName, true, loader);
-    Method main = mainClass.getMethod("main", new Class[] {
-      Array.newInstance(String.class, 0).getClass()
-    });
-    String[] newArgs = Arrays.asList(args)
-      .subList(firstArg, args.length).toArray(new String[0]);
+    Method main = mainClass.getMethod("main", String[].class);
+    List<String> newArgsSubList = Arrays.asList(args)
+        .subList(firstArg, args.length);
+    String[] newArgs = newArgsSubList
+        .toArray(new String[newArgsSubList.size()]);
     try {
-      main.invoke(null, new Object[] { newArgs });
+      main.invoke(null, new Object[] {newArgs});
     } catch (InvocationTargetException e) {
       throw e.getTargetException();
     }
@@ -251,10 +256,10 @@ public class RunJar {
     // see if the client classloader is enabled
     if (useClientClassLoader()) {
       StringBuilder sb = new StringBuilder();
-      sb.append(workDir+"/").
+      sb.append(workDir).append("/").
           append(File.pathSeparator).append(file).
-          append(File.pathSeparator).append(workDir+"/classes/").
-          append(File.pathSeparator).append(workDir+"/lib/*");
+          append(File.pathSeparator).append(workDir).append("/classes/").
+          append(File.pathSeparator).append(workDir).append("/lib/*");
       // HADOOP_CLASSPATH is added to the client classpath
       String hadoopClasspath = getHadoopClasspath();
       if (hadoopClasspath != null && !hadoopClasspath.isEmpty()) {
@@ -270,18 +275,18 @@ public class RunJar {
       loader = new ApplicationClassLoader(clientClasspath,
           getClass().getClassLoader(), systemClassesList);
     } else {
-      List<URL> classPath = new ArrayList<URL>();
-      classPath.add(new File(workDir+"/").toURI().toURL());
+      List<URL> classPath = new ArrayList<>();
+      classPath.add(new File(workDir + "/").toURI().toURL());
       classPath.add(file.toURI().toURL());
       classPath.add(new File(workDir, "classes/").toURI().toURL());
       File[] libs = new File(workDir, "lib").listFiles();
       if (libs != null) {
-        for (int i = 0; i < libs.length; i++) {
-          classPath.add(libs[i].toURI().toURL());
+        for (File lib : libs) {
+          classPath.add(lib.toURI().toURL());
         }
       }
       // create a normal parent-delegating classloader
-      loader = new URLClassLoader(classPath.toArray(new URL[0]));
+      loader = new URLClassLoader(classPath.toArray(new URL[classPath.size()]));
     }
     return loader;
   }
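
For context, the heart of the RunJar change above is moving the unpack loop onto
try-with-resources so the JarFile and both streams are closed even when a copy
fails, plus a simpler reflective lookup of main(). The following is a minimal,
self-contained sketch of the same try-with-resources pattern; it is not the
committed RunJar code, and the class name and constant here are illustrative only:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.util.Enumeration;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    public class UnJarSketch {
      // Illustrative copy buffer, mirroring the BUFFER_SIZE constant added above.
      private static final int BUFFER_SIZE = 8_192;

      /** Unpack every regular entry of jarFile into toDir. */
      public static void unJar(File jarFile, File toDir) throws IOException {
        try (JarFile jar = new JarFile(jarFile)) {            // jar closed automatically
          Enumeration<JarEntry> entries = jar.entries();
          while (entries.hasMoreElements()) {
            JarEntry entry = entries.nextElement();
            if (entry.isDirectory()) {
              continue;
            }
            File out = new File(toDir, entry.getName());
            File parent = out.getParentFile();
            if (parent != null && !parent.isDirectory() && !parent.mkdirs()) {
              throw new IOException("Mkdirs failed to create " + parent);
            }
            try (InputStream in = jar.getInputStream(entry);    // both streams closed
                 OutputStream os = new FileOutputStream(out)) { // even if the copy throws
              byte[] buf = new byte[BUFFER_SIZE];
              int n;
              while ((n = in.read(buf)) != -1) {
                os.write(buf, 0, n);
              }
            }
            // RunJar counts setLastModified failures and logs a single warning;
            // this sketch simply ignores the return value.
            out.setLastModified(entry.getTime());
          }
        }
      }
    }

The other visible simplification is the reflective main() invocation. A
hypothetical driver showing the same idea (names are illustrative, not part of
RunJar):

    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;
    import java.util.Arrays;

    public class MainInvoker {
      /** Reflectively invoke mainClass.main() with args[firstArg..]. */
      public static void invokeMain(Class<?> mainClass, String[] args, int firstArg)
          throws Throwable {
        // String[].class replaces the old Array.newInstance(String.class, 0).getClass().
        Method main = mainClass.getMethod("main", String[].class);
        String[] newArgs = Arrays.asList(args)
            .subList(firstArg, args.length).toArray(new String[0]);
        try {
          // Wrap the array so varargs does not spread it into separate parameters.
          main.invoke(null, new Object[] {newArgs});
        } catch (InvocationTargetException e) {
          throw e.getTargetException();
        }
      }
    }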

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f343d91e/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
index 7262534..6622389 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.util;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
 
@@ -29,15 +32,16 @@ import java.util.jar.JarOutputStream;
 import java.util.regex.Pattern;
 import java.util.zip.ZipEntry;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-public class TestRunJar extends TestCase {
+public class TestRunJar {
+  private static final String FOOBAR_TXT = "foobar.txt";
+  private static final String FOOBAZ_TXT = "foobaz.txt";
+  private static final int BUFF_SIZE = 2048;
   private File TEST_ROOT_DIR;
 
   private static final String TEST_JAR_NAME="test-runjar.jar";
@@ -45,9 +49,8 @@ public class TestRunJar extends TestCase {
   private static final long MOCKED_NOW = 1_460_389_972_000L;
   private static final long MOCKED_NOW_PLUS_TWO_SEC = MOCKED_NOW + 2_000;
 
-  @Override
   @Before
-  protected void setUp() throws Exception {
+  public void setUp() throws Exception {
     TEST_ROOT_DIR = GenericTestUtils.getTestDir(getClass().getSimpleName());
     if (!TEST_ROOT_DIR.exists()) {
       TEST_ROOT_DIR.mkdirs();
@@ -56,9 +59,8 @@ public class TestRunJar extends TestCase {
     makeTestJar();
   }
 
-  @Override
   @After
-  protected void tearDown() {
+  public void tearDown() {
     FileUtil.fullyDelete(TEST_ROOT_DIR);
   }
 
@@ -70,11 +72,11 @@ public class TestRunJar extends TestCase {
     File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
     JarOutputStream jstream =
         new JarOutputStream(new FileOutputStream(jarFile));
-    ZipEntry zipEntry1 = new ZipEntry("foobar.txt");
+    ZipEntry zipEntry1 = new ZipEntry(FOOBAR_TXT);
     zipEntry1.setTime(MOCKED_NOW);
     jstream.putNextEntry(zipEntry1);
     jstream.closeEntry();
-    ZipEntry zipEntry2 = new ZipEntry("foobaz.txt");
+    ZipEntry zipEntry2 = new ZipEntry(FOOBAZ_TXT);
     zipEntry2.setTime(MOCKED_NOW_PLUS_TWO_SEC);
     jstream.putNextEntry(zipEntry2);
     jstream.closeEntry();
@@ -86,50 +88,52 @@ public class TestRunJar extends TestCase {
    */
   @Test
   public void testUnJar() throws Exception {
-    File unjarDir = new File(TEST_ROOT_DIR, "unjar-all");
-    assertFalse("unjar dir shouldn't exist at test start",
-                new File(unjarDir, "foobar.txt").exists());
+    File unjarDir = getUnjarDir("unjar-all");
 
     // Unjar everything
     RunJar.unJar(new File(TEST_ROOT_DIR, TEST_JAR_NAME),
                  unjarDir);
     assertTrue("foobar unpacked",
-               new File(unjarDir, "foobar.txt").exists());
+               new File(unjarDir, TestRunJar.FOOBAR_TXT).exists());
     assertTrue("foobaz unpacked",
-               new File(unjarDir, "foobaz.txt").exists());
-
+               new File(unjarDir, FOOBAZ_TXT).exists());
   }
 
   /**
    * Test unjarring a specific regex
    */
+  @Test
   public void testUnJarWithPattern() throws Exception {
-    File unjarDir = new File(TEST_ROOT_DIR, "unjar-pattern");
-    assertFalse("unjar dir shouldn't exist at test start",
-                new File(unjarDir, "foobar.txt").exists());
+    File unjarDir = getUnjarDir("unjar-pattern");
 
     // Unjar only a regex
     RunJar.unJar(new File(TEST_ROOT_DIR, TEST_JAR_NAME),
                  unjarDir,
                  Pattern.compile(".*baz.*"));
     assertFalse("foobar not unpacked",
-                new File(unjarDir, "foobar.txt").exists());
+                new File(unjarDir, TestRunJar.FOOBAR_TXT).exists());
     assertTrue("foobaz unpacked",
-               new File(unjarDir, "foobaz.txt").exists());
-
+               new File(unjarDir, FOOBAZ_TXT).exists());
   }
 
+  @Test
   public void testUnJarDoesNotLooseLastModify() throws Exception {
-    File unjarDir = new File(TEST_ROOT_DIR, "unjar-lastmod");
-    assertFalse("unjar dir shouldn't exist at test start",
-            new File(unjarDir, "foobar.txt").exists());
+    File unjarDir = getUnjarDir("unjar-lastmod");
 
     // Unjar everything
     RunJar.unJar(new File(TEST_ROOT_DIR, TEST_JAR_NAME),
             unjarDir);
 
-    assertEquals("Last modify time was lost during unJar", MOCKED_NOW, new File(unjarDir, "foobar.txt").lastModified());
-    assertEquals("Last modify time was lost during unJar", MOCKED_NOW_PLUS_TWO_SEC, new File(unjarDir, "foobaz.txt").lastModified());
+    String failureMessage = "Last modify time was lost during unJar";
+    assertEquals(failureMessage, MOCKED_NOW, new File(unjarDir, TestRunJar.FOOBAR_TXT).lastModified());
+    assertEquals(failureMessage, MOCKED_NOW_PLUS_TWO_SEC, new File(unjarDir, FOOBAZ_TXT).lastModified());
+  }
+
+  private File getUnjarDir(String dirName) {
+    File unjarDir = new File(TEST_ROOT_DIR, dirName);
+    assertFalse("unjar dir shouldn't exist at test start",
+                new File(unjarDir, TestRunJar.FOOBAR_TXT).exists());
+    return unjarDir;
   }
 
   /**
@@ -174,10 +178,10 @@ public class TestRunJar extends TestCase {
       ZipEntry entry = new ZipEntry(name);
       jstream.putNextEntry(entry);
       BufferedInputStream bufInputStream = new BufferedInputStream(
-          entryInputStream, 2048);
+          entryInputStream, BUFF_SIZE);
       int count;
-      byte[] data = new byte[2048];
-      while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+      byte[] data = new byte[BUFF_SIZE];
+      while ((count = bufInputStream.read(data, 0, BUFF_SIZE)) != -1) {
         jstream.write(data, 0, count);
       }
       jstream.closeEntry();
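
On the test side the change is the standard JUnit 3 to JUnit 4 migration: drop
the TestCase superclass, make setUp()/tearDown() public and annotate them with
@Before/@After, mark every test with @Test, and static-import the assertions. A
minimal standalone example of the same pattern (a hypothetical test class, not
TestRunJar itself):

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertTrue;

    import java.io.File;
    import java.io.IOException;

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class TempDirTest {   // no longer "extends TestCase"
      private File testRoot;

      @Before                    // replaces the overridden protected setUp()
      public void setUp() throws IOException {
        testRoot = File.createTempFile("temp-dir-test", "");
        assertTrue(testRoot.delete());
        assertTrue(testRoot.mkdirs());
      }

      @After                     // replaces the overridden protected tearDown()
      public void tearDown() {
        File[] children = testRoot.listFiles();
        if (children != null) {
          for (File f : children) {
            f.delete();
          }
        }
        testRoot.delete();
      }

      @Test                      // tests are found by annotation, not by a "test" name prefix
      public void createsEmptyDirectory() {
        assertEquals("fresh test dir should be empty",
            0, testRoot.listFiles().length);
      }
    }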

