Author: szetszwo
Date: Thu Jan 31 21:39:42 2013
New Revision: 1441206

URL: http://svn.apache.org/viewvc?rev=1441206&view=rev
Log:
Merge r1440222 through r1441205 from trunk.

Added:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
      - copied unchanged from r1441205, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm
      - copied unchanged from r1441205, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm
      - copied unchanged from r1441205, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/Superusers.apt.vm
      - copied unchanged from r1441205, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/Superusers.apt.vm
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tar
      - copied unchanged from r1441205, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tar
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tgz
      - copied unchanged from r1441205, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tgz
Removed:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/Superusers.xml
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/deployment_layout.xml
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/native_libraries.xml
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/service_level_auth.xml
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/single_node_setup.xml
Modified:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt Thu Jan 31 21:39:42 2013
@@ -325,6 +325,9 @@ Trunk (Unreleased)
     HADOOP-9249. hadoop-maven-plugins version-info goal causes build failure
     when running with Clover. (Chris Nauroth via suresh)
 
+    HADOOP-9264. Port change to use Java untar API on Windows from 
+    branch-1-win to trunk. (Chris Nauroth via suresh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -586,6 +589,10 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-8857. hadoop.http.authentication.signature.secret.file docs 
     should not state that secret is randomly generated. (tucu)
 
+    HADOOP-9221. Convert remaining xdocs to APT. (Andy Isaacson via atm)
+
+    HADOOP-8981. TestMetricsSystemImpl fails on Windows. (Xuan Gong via suresh)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1440222-1441205

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/pom.xml?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/pom.xml Thu Jan 31 21:39:42 2013
@@ -241,6 +241,11 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>1.4</version>
+    </dependency>
   </dependencies>
 
   <build>
@@ -382,6 +387,23 @@
             </configuration>
           </execution>
           <execution>
+            <id>copy-test-tarballs</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <copy toDir="${test.cache.data}">
+                  <fileset dir="${basedir}/src/test/java/org/apache/hadoop/fs">
+                    <include name="test-untar.tar"/>
+                    <include name="test-untar.tgz"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
             <phase>pre-site</phase>
             <goals>
               <goal>run</goal>
@@ -485,6 +507,7 @@
            <exclude>src/test/all-tests</exclude>
            <exclude>src/test/resources/kdc/ldif/users.ldif</exclude>
            <exclude>src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c</exclude>
+            <exclude>src/test/java/org/apache/hadoop/fs/test-untar.tgz</exclude>
           </excludes>
         </configuration>
       </plugin>

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1440222-1441205

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1440222-1441205

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Thu Jan 31 21:39:42 2013
@@ -1128,6 +1128,17 @@ public abstract class FileSystem extends
   public abstract FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException;
 
+  /**
+   * Concat existing files together.
+   * @param trg the path to the target destination.
+   * @param psrcs the paths to the sources to use for the concatenation.
+   * @throws IOException
+   */
+  public void concat(final Path trg, final Path [] psrcs) throws IOException {
+    throw new UnsupportedOperationException("Not implemented by the " + 
+        getClass().getSimpleName() + " FileSystem implementation");
+  }
+
  /**
    * Get replication.
    * 

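For context, the base-class concat() added above deliberately rejects the call, so only file systems that override it (for example HDFS's DistributedFileSystem) support it. A minimal caller sketch; the hdfs://nn:8020/ URI and the /data paths are hypothetical:

    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ConcatSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Hypothetical namenode address.
        FileSystem fs = FileSystem.get(URI.create("hdfs://nn:8020/"), conf);
        Path target = new Path("/data/part-all");
        Path[] srcs = { new Path("/data/part-0"), new Path("/data/part-1") };
        try {
          // On HDFS this appends the source files onto target and removes them.
          fs.concat(target, srcs);
        } catch (UnsupportedOperationException e) {
          // The default FileSystem.concat() above lands here.
          System.err.println("concat not supported by "
              + fs.getClass().getSimpleName());
        }
      }
    }
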
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java Thu Jan 31 21:39:42 2013
@@ -21,9 +21,12 @@ package org.apache.hadoop.fs;
 import java.io.*;
 import java.util.Arrays;
 import java.util.Enumeration;
+import java.util.zip.GZIPInputStream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -624,14 +627,28 @@ public class FileUtil {
    * @throws IOException
    */
   public static void unTar(File inFile, File untarDir) throws IOException {
-    if (!untarDir.mkdirs()) {           
+    if (!untarDir.mkdirs()) {
       if (!untarDir.isDirectory()) {
         throw new IOException("Mkdirs failed to create " + untarDir);
       }
     }
 
-    StringBuilder untarCommand = new StringBuilder();
     boolean gzipped = inFile.toString().endsWith("gz");
+    if(Shell.WINDOWS) {
+      // Tar is not native to Windows. Use simple Java based implementation for
+      // tests and simple tar archives
+      unTarUsingJava(inFile, untarDir, gzipped);
+    }
+    else {
+      // spawn tar utility to untar archive for full fledged unix behavior such
+      // as resolving symlinks in tar archives
+      unTarUsingTar(inFile, untarDir, gzipped);
+    }
+  }
+  
+  private static void unTarUsingTar(File inFile, File untarDir,
+      boolean gzipped) throws IOException {
+    StringBuffer untarCommand = new StringBuffer();
     if (gzipped) {
       untarCommand.append(" gzip -dc '");
       untarCommand.append(FileUtil.makeShellPath(inFile));
@@ -656,7 +673,62 @@ public class FileUtil {
                   ". Tar process exited with exit code " + exitcode);
     }
   }
+  
+  private static void unTarUsingJava(File inFile, File untarDir,
+      boolean gzipped) throws IOException {
+    InputStream inputStream = null;
+    if (gzipped) {
+      inputStream = new BufferedInputStream(new GZIPInputStream(
+          new FileInputStream(inFile)));
+    } else {
+      inputStream = new BufferedInputStream(new FileInputStream(inFile));
+    }
+
+    TarArchiveInputStream tis = new TarArchiveInputStream(inputStream);
+
+    for (TarArchiveEntry entry = tis.getNextTarEntry(); entry != null;) {
+      unpackEntries(tis, entry, untarDir);
+      entry = tis.getNextTarEntry();
+    }
+  }
+  
+  private static void unpackEntries(TarArchiveInputStream tis,
+      TarArchiveEntry entry, File outputDir) throws IOException {
+    if (entry.isDirectory()) {
+      File subDir = new File(outputDir, entry.getName());
+      if (!subDir.mkdir() && !subDir.isDirectory()) {
+        throw new IOException("Mkdirs failed to create tar internal dir "
+            + outputDir);
+      }
+
+      for (TarArchiveEntry e : entry.getDirectoryEntries()) {
+        unpackEntries(tis, e, subDir);
+      }
 
+      return;
+    }
+
+    File outputFile = new File(outputDir, entry.getName());
+    if (!outputDir.exists()) {
+      if (!outputDir.mkdirs()) {
+        throw new IOException("Mkdirs failed to create tar internal dir "
+            + outputDir);
+      }
+    }
+
+    int count;
+    byte data[] = new byte[2048];
+    BufferedOutputStream outputStream = new BufferedOutputStream(
+        new FileOutputStream(outputFile));
+
+    while ((count = tis.read(data)) != -1) {
+      outputStream.write(data, 0, count);
+    }
+
+    outputStream.flush();
+    outputStream.close();
+  }
+  
   /**
    * Class for creating hardlinks.
    * Supports Unix, Cygwin, WindXP.

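The unTar() split above keeps the external tar process on Unix (preserving full tar semantics such as symlink resolution) and uses the commons-compress based unTarUsingJava() on Windows, where no native tar is available. A minimal caller sketch; both paths are hypothetical:

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.fs.FileUtil;

    public class UnTarSketch {
      public static void main(String[] args) throws IOException {
        // A name ending in "gz" (e.g. .tgz) is gunzipped before extraction.
        File archive = new File("/tmp/test-untar.tgz");
        File destDir = new File("/tmp/untarred");
        // Dispatches to unTarUsingJava() on Windows, the tar utility elsewhere.
        FileUtil.unTar(archive, destDir);
      }
    }
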
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java Thu Jan 31 21:39:42 2013
@@ -160,6 +160,11 @@ public class FilterFileSystem extends Fi
   }
 
   @Override
+  public void concat(Path f, Path[] psrcs) throws IOException {
+    fs.concat(f, psrcs);
+  }
+
+  @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
       Progressable progress) throws IOException {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java Thu Jan 31 21:39:42 2013
@@ -55,6 +55,9 @@ public enum Errno {
   EPIPE,
   EDOM,
   ERANGE,
+  ELOOP,
+  ENAMETOOLONG,
+  ENOTEMPTY,
 
   UNKNOWN;
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Thu Jan 31 21:39:42 2013
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.nativeio;
 
+import java.io.File;
 import java.io.FileDescriptor;
 import java.io.IOException;
 import java.util.Map;
@@ -293,4 +294,35 @@ public class NativeIO {
     stat.group = getName(IdCache.GROUP, stat.groupId);
     return stat;
   }
+  
+  /**
+   * A version of renameTo that throws a descriptive exception when it fails.
+   *
+   * @param src                  The source path
+   * @param dst                  The destination path
+   * 
+   * @throws NativeIOException   On failure.
+   */
+  public static void renameTo(File src, File dst)
+      throws IOException {
+    if (!nativeLoaded) {
+      if (!src.renameTo(dst)) {
+        throw new IOException("renameTo(src=" + src + ", dst=" +
+          dst + ") failed.");
+      }
+    } else {
+      renameTo0(src.getAbsolutePath(), dst.getAbsolutePath());
+    }
+  }
+
+  /**
+   * A version of renameTo that throws a descriptive exception when it fails.
+   *
+   * @param src                  The source path
+   * @param dst                  The destination path
+   * 
+   * @throws NativeIOException   On failure.
+   */
+  private static native void renameTo0(String src, String dst)
+      throws NativeIOException;
 }

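From the caller's side, the value of NativeIO.renameTo() over File.renameTo() is the descriptive failure: when the native library is loaded, a failed rename surfaces as a NativeIOException carrying the errno. A minimal sketch with hypothetical paths:

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.io.nativeio.Errno;
    import org.apache.hadoop.io.nativeio.NativeIO;
    import org.apache.hadoop.io.nativeio.NativeIOException;

    public class RenameSketch {
      public static void main(String[] args) throws IOException {
        File src = new File("/tmp/a");  // hypothetical
        File dst = new File("/tmp/b");  // hypothetical
        try {
          NativeIO.renameTo(src, dst);
        } catch (NativeIOException e) {
          // Native path: errno is preserved, e.g. ENOENT for a missing source.
          if (e.getErrno() == Errno.ENOENT) {
            System.err.println("source does not exist: " + src);
          }
          throw e;
        }
        // Without the native library, failures arrive as a plain IOException.
      }
    }
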
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Thu Jan 31 21:39:42 2013
@@ -24,11 +24,12 @@
 #include <grp.h>
 #include <jni.h>
 #include <pwd.h>
+#include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 #include <sys/stat.h>
-#include <sys/types.h>
 #include <sys/syscall.h>
+#include <sys/types.h>
 #include <unistd.h>
 
 #include "config.h"
@@ -502,6 +503,26 @@ ssize_t get_pw_buflen() {
   #endif
   return (ret > 512) ? ret : 512;
 }
+
+JNIEXPORT void JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env, 
+jclass clazz, jstring jsrc, jstring jdst)
+{
+  const char *src = NULL, *dst = NULL;
+  
+  src = (*env)->GetStringUTFChars(env, jsrc, NULL);
+  if (!src) goto done; // exception was thrown
+  dst = (*env)->GetStringUTFChars(env, jdst, NULL);
+  if (!dst) goto done; // exception was thrown
+  if (rename(src, dst)) {
+    throw_ioe(env, errno);
+  }
+
+done:
+  if (src) (*env)->ReleaseStringUTFChars(env, jsrc, src);
+  if (dst) (*env)->ReleaseStringUTFChars(env, jdst, dst);
+}
+
 /**
  * vim: sw=2: ts=2: et:
  */

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c Thu Jan 31 21:39:42 2013
@@ -63,6 +63,9 @@ static errno_mapping_t ERRNO_MAPPINGS[] 
   MAPPING(EPIPE),
   MAPPING(EDOM),
   MAPPING(ERANGE),
+  MAPPING(ELOOP),
+  MAPPING(ENAMETOOLONG),
+  MAPPING(ENOTEMPTY),
   {-1, NULL}
 };
 

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1440222-1441205

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java Thu Jan 31 21:39:42 2013
@@ -546,4 +546,44 @@ public class TestFileUtil {
     long expected = 2 * (3 + System.getProperty("line.separator").length());
     Assert.assertEquals(expected, du);
   }
+
+  private void doUntarAndVerify(File tarFile, File untarDir) 
+                                 throws IOException {
+    if (untarDir.exists() && !FileUtil.fullyDelete(untarDir)) {
+      throw new IOException("Could not delete directory '" + untarDir + "'");
+    }
+    FileUtil.unTar(tarFile, untarDir);
+
+    String parentDir = untarDir.getCanonicalPath() + Path.SEPARATOR + "name";
+    File testFile = new File(parentDir + Path.SEPARATOR + "version");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 0);
+    String imageDir = parentDir + Path.SEPARATOR + "image";
+    testFile = new File(imageDir + Path.SEPARATOR + "fsimage");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 157);
+    String currentDir = parentDir + Path.SEPARATOR + "current";
+    testFile = new File(currentDir + Path.SEPARATOR + "fsimage");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 4331);
+    testFile = new File(currentDir + Path.SEPARATOR + "edits");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 1033);
+    testFile = new File(currentDir + Path.SEPARATOR + "fstime");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 8);
+  }
+
+  @Test
+  public void testUntar() throws IOException {
+    String tarGzFileName = System.getProperty("test.cache.data",
+        "build/test/cache") + "/test-untar.tgz";
+    String tarFileName = System.getProperty("test.cache.data",
+        "build/test/cache") + "/test-untar.tar";
+    String dataDir = System.getProperty("test.build.data", "build/test/data");
+    File untarDir = new File(dataDir, "untarDir");
+
+    doUntarAndVerify(new File(tarGzFileName), untarDir);
+    doUntarAndVerify(new File(tarFileName), untarDir);
+  }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Thu Jan 31 21:39:42 2013
@@ -25,11 +25,14 @@ import java.io.IOException;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.ArrayList;
 import java.util.List;
+
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import static org.junit.Assume.*;
 import static org.junit.Assert.*;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -293,4 +296,40 @@ public class TestNativeIO {
     assertFalse(NativeIO.getGroupName(0).isEmpty());
   }
 
+  @Test
+  public void testRenameTo() throws Exception {
+    final File TEST_DIR = new File(new File(
+        System.getProperty("test.build.data","build/test/data")), "renameTest");
+    assumeTrue(TEST_DIR.mkdirs());
+    File nonExistentFile = new File(TEST_DIR, "nonexistent");
+    File targetFile = new File(TEST_DIR, "target");
+    // Test attempting to rename a nonexistent file.
+    try {
+      NativeIO.renameTo(nonExistentFile, targetFile);
+      Assert.fail();
+    } catch (NativeIOException e) {
+      Assert.assertEquals(e.getErrno(), Errno.ENOENT);
+    }
+    
+    // Test renaming a file to itself.  It should succeed and do nothing.
+    File sourceFile = new File(TEST_DIR, "source");
+    Assert.assertTrue(sourceFile.createNewFile());
+    NativeIO.renameTo(sourceFile, sourceFile);
+
+    // Test renaming a source to a destination.
+    NativeIO.renameTo(sourceFile, targetFile);
+
+    // Test renaming a source to a path which uses a file as a directory.
+    sourceFile = new File(TEST_DIR, "source");
+    Assert.assertTrue(sourceFile.createNewFile());
+    File badTarget = new File(targetFile, "subdir");
+    try {
+      NativeIO.renameTo(sourceFile, badTarget);
+      Assert.fail();
+    } catch (NativeIOException e) {
+      Assert.assertEquals(e.getErrno(), Errno.ENOTDIR);
+    }
+
+    FileUtils.deleteQuietly(TEST_DIR);
+  }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1441206&r1=1441205&r2=1441206&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Thu Jan 31 21:39:42 2013
@@ -56,6 +56,7 @@ import org.apache.hadoop.metrics2.lib.Mu
 import org.apache.hadoop.metrics2.lib.MutableRate;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 
 /**
  * Test the MetricsSystemImpl class
@@ -80,7 +81,7 @@ public class TestMetricsSystemImpl {
     }
   }
 
-  @Test public void testInitFirst() throws Exception {
+  @Test public void testInitFirstVerifyStopInvokedImmediately() throws Exception {
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
         .add("test.sink.test.class", TestSink.class.getName())
@@ -106,14 +107,61 @@ public class TestMetricsSystemImpl {
     ms.stop();
     ms.shutdown();
 
-    verify(sink1, times(2)).putMetrics(r1.capture());
+    //When we call stop, at most two sources will be consumed by each sink thread.
+    verify(sink1, atMost(2)).putMetrics(r1.capture());
+    List<MetricsRecord> mr1 = r1.getAllValues();
+    verify(sink2, atMost(2)).putMetrics(r2.capture());
+    List<MetricsRecord> mr2 = r2.getAllValues();
+    if (mr1.size() != 0 && mr2.size() != 0) {
+      checkMetricsRecords(mr1);
+      assertEquals("output", mr1, mr2);
+    } else if (mr1.size() != 0) {
+      checkMetricsRecords(mr1);
+    } else if (mr2.size() != 0) {
+      checkMetricsRecords(mr2);
+    }
+  }
+
+  @Test public void testInitFirstVerifyCallBacks() throws Exception {
+    DefaultMetricsSystem.shutdown(); 
+    new ConfigBuilder().add("*.period", 8)
+        //.add("test.sink.plugin.urls", getPluginUrlsAsString())
+        .add("test.sink.test.class", TestSink.class.getName())
+        .add("test.*.source.filter.exclude", "s0")
+        .add("test.source.s1.metric.filter.exclude", "X*")
+        .add("test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("test.sink.sink2.metric.filter.exclude", "Y*")
+        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
+    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
+    ms.start();
+    ms.register("s0", "s0 desc", new TestSource("s0rec"));
+    TestSource s1 = ms.register("s1", "s1 desc", new TestSource("s1rec"));
+    s1.c1.incr();
+    s1.xxx.incr();
+    s1.g1.set(2);
+    s1.yyy.incr(2);
+    s1.s1.add(0);
+    MetricsSink sink1 = mock(MetricsSink.class);
+    MetricsSink sink2 = mock(MetricsSink.class);
+    ms.registerSink("sink1", "sink1 desc", sink1);
+    ms.registerSink("sink2", "sink2 desc", sink2);
+    ms.publishMetricsNow(); // publish the metrics
+
+    try {
+      verify(sink1, timeout(200).times(2)).putMetrics(r1.capture());
+      verify(sink2, timeout(200).times(2)).putMetrics(r2.capture());
+    } finally {
+      ms.stop();
+      ms.shutdown();
+    }
+    //When we call stop, at most two sources will be consumed by each sink thread.
     List<MetricsRecord> mr1 = r1.getAllValues();
-    verify(sink2, times(2)).putMetrics(r2.capture());
     List<MetricsRecord> mr2 = r2.getAllValues();
     checkMetricsRecords(mr1);
     assertEquals("output", mr1, mr2);
-  }
 
+  }
+  
   @Test public void testMultiThreadedPublish() throws Exception {
     new ConfigBuilder().add("*.period", 80)
       .add("test.sink.Collector.queue.capacity", "20")

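For reference, the two Mockito verification modes the rewritten tests lean on, in a standalone sketch (the Sink interface here is a hypothetical stand-in for MetricsSink): atMost(n) tolerates the shutdown race where a sink thread may not have consumed every source, while timeout(ms).times(n) waits for an asynchronous but deterministic call count after an explicit publish.

    import static org.mockito.Mockito.*;

    public class VerifySketch {
      interface Sink { void putMetrics(String record); }  // hypothetical

      public static void main(String[] args) {
        Sink sink = mock(Sink.class);
        sink.putMetrics("r1");

        // Passes for 0, 1, or 2 invocations: tolerant of a shutdown race.
        verify(sink, atMost(2)).putMetrics(anyString());

        // Waits up to 200 ms for exactly one invocation: asynchronous
        // delivery, deterministic count.
        verify(sink, timeout(200).times(1)).putMetrics(anyString());
      }
    }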
