Author: wang
Date: Tue Aug  5 02:30:54 2014
New Revision: 1615844

URL: http://svn.apache.org/r1615844
Log:
Merge from trunk to branch

Added:
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Classpath.java
      - copied unchanged from r1615843, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Classpath.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
      - copied unchanged from r1615843, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSAudit.java
      - copied unchanged from r1615843, hadoop/common/trunk/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSAudit.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/test/resources/log4j-kmsaudit.properties
      - copied unchanged from r1615843, hadoop/common/trunk/hadoop-common-project/hadoop-kms/src/test/resources/log4j-kmsaudit.properties
Modified:
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSExceptionsProvider.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt Tue Aug  5 02:30:54 2014
@@ -192,6 +192,11 @@ Trunk (Unreleased)
     HADOOP-10891. Add EncryptedKeyVersion factory method to
     KeyProviderCryptoExtension. (wang)
 
+    HADOOP-10756. KMS audit log should consolidate successful similar requests.
+    (asuresh via tucu)
+
+    HADOOP-10793. KeyShell args should use single-dash style. (wang)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -405,6 +410,12 @@ Trunk (Unreleased)
     HADOOP-10881. Clarify usage of encryption and encrypted encryption
     key in KeyProviderCryptoExtension. (wang)
 
+    HADOOP-10920. site plugin couldn't parse hadoop-kms index.apt.vm.
+    (Akira Ajisaka via wang)
+
+    HADOOP-10925. Compilation fails in native link0 function on Windows.
+    (cnauroth)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -463,6 +474,14 @@ Release 2.6.0 - UNRELEASED
     HADOOP-8069. Enable TCP_NODELAY by default for IPC. (Todd Lipcon via
     Arpit Agarwal)
 
+    HADOOP-10902. Deletion of directories with snapshots will not output
+    reason for trash move failure. (Stephen Chu via wang)
+
+    HADOOP-10900. CredentialShell args should use single-dash style. (wang)
+
+    HADOOP-10903. Enhance hadoop classpath command to expand wildcards or write
+    classpath into jar manifest. (cnauroth)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -497,6 +516,15 @@ Release 2.6.0 - UNRELEASED
     HADOOP-10876. The constructor of Path should not take an empty URL as a
     parameter. (Zhihai Xu via wang)
 
+    HADOOP-10928. Incorrect usage on `hadoop credential list`.
+    (Josh Elser via wang)
+
+    HADOOP-10927. Fix CredentialShell help behavior and error codes.
+    (Josh Elser via wang)
+
+    HADOOP-10937. Need to set version name correctly before decrypting EEK.
+    (Arun Suresh via wang)
+
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -637,6 +665,8 @@ Release 2.5.0 - UNRELEASED
 
   BUG FIXES 
 
+    HADOOP-10759. Remove hardcoded JAVA_HEAP_MAX. (Sam Liu via Eric Yang)
+
     HADOOP-10378. Typo in help printed by hdfs dfs -help.
     (Mit Desai via suresh)
 
@@ -813,6 +843,8 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10894. Fix dead link in ToolRunner documentation. (Akira Ajisaka
     via Arpit Agarwal)
 
+    HADOOP-10910. Increase findbugs maxHeap size. (wang)
+
   BREAKDOWN OF HADOOP-10514 SUBTASKS AND RELATED JIRAS
 
     HADOOP-10520. Extended attributes definition and FileSystem APIs for

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1614551-1615843

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop Tue Aug  5 02:30:54 2014
@@ -35,6 +35,7 @@ function print_usage(){
   echo "  distcp <srcurl> <desturl> copy file or directories recursively"
   echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a 
hadoop archive"
   echo "  classpath            prints the class path needed to get the"
+  echo "  credential           interact with credential providers"
   echo "                       Hadoop jar and the required libraries"
   echo "  daemonlog            get/set the log level for each daemon"
   echo " or"
@@ -90,11 +91,6 @@ case $COMMAND in
     fi
     ;;
 
-  classpath)
-    echo $CLASSPATH
-    exit
-    ;;
-
   #core commands  
   *)
     # the core commands
@@ -118,6 +114,14 @@ case $COMMAND in
       CLASSPATH=${CLASSPATH}:${TOOL_PATH}
     elif [ "$COMMAND" = "credential" ] ; then
       CLASS=org.apache.hadoop.security.alias.CredentialShell
+    elif [ "$COMMAND" = "classpath" ] ; then
+      if [ "$#" -eq 1 ]; then
+        # No need to bother starting up a JVM for this simple case.
+        echo $CLASSPATH
+        exit
+      else
+        CLASS=org.apache.hadoop.util.Classpath
+      fi
     elif [[ "$COMMAND" = -*  ]] ; then
         # class and package names cannot begin with a -
         echo "Error: No command named \`$COMMAND' was found. Perhaps you meant 
\`hadoop ${COMMAND#-}'"

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh Tue Aug  5 02:30:54 2014
@@ -149,8 +149,6 @@ if [[ -z $JAVA_HOME ]]; then
 fi
 
 JAVA=$JAVA_HOME/bin/java
-# some Java parameters
-JAVA_HEAP_MAX=-Xmx1000m 
 
 # check envvars which might override default args
 if [ "$HADOOP_HEAPSIZE" != "" ]; then

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd Tue Aug  5 02:30:54 2014
@@ -115,11 +115,14 @@ call :updatepath %HADOOP_BIN_PATH%
   )
 
   if %hadoop-command% == classpath (
-    @echo %CLASSPATH%
-    goto :eof
+    if not defined hadoop-command-arguments (
+      @rem No need to bother starting up a JVM for this simple case.
+      @echo %CLASSPATH%
+      exit /b
+    )
   )
   
-  set corecommands=fs version jar checknative distcp daemonlog archive
+  set corecommands=fs version jar checknative distcp daemonlog archive classpath
   for %%i in ( %corecommands% ) do (
     if %hadoop-command% == %%i set corecommand=true  
   )
@@ -175,6 +178,10 @@ call :updatepath %HADOOP_BIN_PATH%
   set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
   goto :eof
 
+:classpath
+  set CLASS=org.apache.hadoop.util.Classpath
+  goto :eof
+
 :updatepath
   set path_to_add=%*
   set current_path_comparable=%path%

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1614551-1615843

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Tue Aug  5 02:30:54 2014
@@ -1844,6 +1844,38 @@ public class Configuration implements It
   }
 
   /**
+   * Get the socket address for <code>hostProperty</code> as a
+   * <code>InetSocketAddress</code>. If <code>hostProperty</code> is
+   * <code>null</code>, <code>addressProperty</code> will be used. This
+   * is useful for cases where we want to differentiate between host
+   * bind address and address clients should use to establish connection.
+   *
+   * @param hostProperty bind host property name.
+   * @param addressProperty address property name.
+   * @param defaultAddressValue the default value
+   * @param defaultPort the default port
+   * @return InetSocketAddress
+   */
+  public InetSocketAddress getSocketAddr(
+      String hostProperty,
+      String addressProperty,
+      String defaultAddressValue,
+      int defaultPort) {
+
+    InetSocketAddress bindAddr = getSocketAddr(
+      addressProperty, defaultAddressValue, defaultPort);
+
+    final String host = get(hostProperty);
+
+    if (host == null || host.isEmpty()) {
+      return bindAddr;
+    }
+
+    return NetUtils.createSocketAddr(
+        host, bindAddr.getPort(), hostProperty);
+  }
+
+  /**
    * Get the socket address for <code>name</code> property as a
    * <code>InetSocketAddress</code>.
    * @param name property name.
@@ -1864,6 +1896,40 @@ public class Configuration implements It
   public void setSocketAddr(String name, InetSocketAddress addr) {
     set(name, NetUtils.getHostPortString(addr));
   }
+
+  /**
+   * Set the socket address a client can use to connect for the
+   * <code>name</code> property as a <code>host:port</code>.  The wildcard
+   * address is replaced with the local host's address. If the host and address
+   * properties are configured the host component of the address will be combined
+   * with the port component of the addr to generate the address.  This is to allow
+   * optional control over which host name is used in multi-home bind-host
+   * cases where a host can have multiple names
+   * @param hostProperty the bind-host configuration name
+   * @param addressProperty the service address configuration name
+   * @param defaultAddressValue the service default address configuration value
+   * @param addr InetSocketAddress of the service listener
+   * @return InetSocketAddress for clients to connect
+   */
+  public InetSocketAddress updateConnectAddr(
+      String hostProperty,
+      String addressProperty,
+      String defaultAddressValue,
+      InetSocketAddress addr) {
+
+    final String host = get(hostProperty);
+    final String connectHostPort = getTrimmed(addressProperty, defaultAddressValue);
+
+    if (host == null || host.isEmpty() || connectHostPort == null || connectHostPort.isEmpty()) {
+      //not our case, fall back to original logic
+      return updateConnectAddr(addressProperty, addr);
+    }
+
+    final String connectHost = connectHostPort.split(":")[0];
+    // Create connect address using client address hostname and server port.
+    return updateConnectAddr(addressProperty, NetUtils.createSocketAddrForHost(
+        connectHost, addr.getPort()));
+  }
   
   /**
    * Set the socket address a client can use to connect for the

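The two Configuration overloads added above work as a pair: resolve the listen address from a bind-host property, then advertise a client-facing address built from the service address host and the actual listener port. A minimal sketch follows; the class, property names, and default address are hypothetical, not part of this commit:

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;

    public class BindHostExample {                       // hypothetical example class
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Prefer the bind-host property for the listen address; fall back to the
        // host of the address property (property names are illustrative).
        InetSocketAddress bindAddr = conf.getSocketAddr(
            "my.service.bind-host", "my.service.address", "0.0.0.0:8020", 8020);
        // After binding, derive the address clients should use: host from the
        // address property combined with the port the server actually bound to.
        InetSocketAddress connectAddr = conf.updateConnectAddr(
            "my.service.bind-host", "my.service.address", "0.0.0.0:8020", bindAddr);
        System.out.println(bindAddr + " -> " + connectAddr);
      }
    }
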
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java Tue Aug  5 02:30:54 2014
@@ -21,11 +21,13 @@ package org.apache.hadoop.crypto.key;
 import java.io.IOException;
 import java.security.GeneralSecurityException;
 import java.security.SecureRandom;
+
 import javax.crypto.Cipher;
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;
 
 import com.google.common.base.Preconditions;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 
 /**
@@ -97,7 +99,7 @@ public class KeyProviderCryptoExtension 
     public static EncryptedKeyVersion createForDecryption(String
         encryptionKeyVersionName, byte[] encryptedKeyIv,
         byte[] encryptedKeyMaterial) {
-      KeyVersion encryptedKeyVersion = new KeyVersion(null, null,
+      KeyVersion encryptedKeyVersion = new KeyVersion(null, EEK,
           encryptedKeyMaterial);
       return new EncryptedKeyVersion(null, encryptionKeyVersionName,
           encryptedKeyIv, encryptedKeyVersion);
@@ -258,6 +260,13 @@ public class KeyProviderCryptoExtension 
           keyProvider.getKeyVersion(encryptionKeyVersionName);
       Preconditions.checkNotNull(encryptionKey,
           "KeyVersion name '%s' does not exist", encryptionKeyVersionName);
+      Preconditions.checkArgument(
+              encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
+                    .equals(KeyProviderCryptoExtension.EEK),
+                "encryptedKey version name must be '%s', is '%s'",
+                KeyProviderCryptoExtension.EEK,
+                encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
+            );
       final byte[] encryptionKeyMaterial = encryptionKey.getMaterial();
       // Encryption key IV is determined from encrypted key's IV
       final byte[] encryptionIV =

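The EEK fix above matters when an EncryptedKeyVersion is reconstructed for decryption: the factory now labels the wrapped material as EEK, which satisfies the new precondition in decryptEncryptedKey(). A minimal sketch of that flow, assuming an already-configured KeyProvider and illustrative key material; the helper class is hypothetical:

    import org.apache.hadoop.crypto.key.KeyProvider;
    import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
    import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
    import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;

    public class DecryptEekExample {                     // hypothetical helper
      static KeyVersion decrypt(KeyProvider provider, String keyVersionName,
          byte[] iv, byte[] encryptedMaterial) throws Exception {
        KeyProviderCryptoExtension kpce =
            KeyProviderCryptoExtension.createKeyProviderCryptoExtension(provider);
        // createForDecryption() now tags the wrapped material as EEK, so the
        // version-name check added to decryptEncryptedKey() above passes.
        EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption(
            keyVersionName, iv, encryptedMaterial);
        return kpce.decryptEncryptedKey(ekv);
      }
    }
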
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java Tue Aug  5 02:30:54 2014
@@ -38,9 +38,9 @@ import org.apache.hadoop.util.ToolRunner
  */
 public class KeyShell extends Configured implements Tool {
   final static private String USAGE_PREFIX = "Usage: hadoop key " +
-               "[generic options]\n";
+      "[generic options]\n";
   final static private String COMMANDS =
-      "   [--help]\n" +
+      "   [-help]\n" +
       "   [" + CreateCommand.USAGE + "]\n" +
       "   [" + RollCommand.USAGE + "]\n" +
       "   [" + DeleteCommand.USAGE + "]\n" +
@@ -90,11 +90,11 @@ public class KeyShell extends Configured
   /**
    * Parse the command line arguments and initialize the data
    * <pre>
-   * % hadoop key create keyName [--size size] [--cipher algorithm]
-   *    [--provider providerPath]
-   * % hadoop key roll keyName [--provider providerPath]
+   * % hadoop key create keyName [-size size] [-cipher algorithm]
+   *    [-provider providerPath]
+   * % hadoop key roll keyName [-provider providerPath]
    * % hadoop key list [-provider providerPath]
-   * % hadoop key delete keyName [--provider providerPath] [-i]
+   * % hadoop key delete keyName [-provider providerPath] [-i]
    * </pre>
    * @param args Command line arguments.
    * @return 0 on success, 1 on failure.
@@ -107,47 +107,47 @@ public class KeyShell extends Configured
     for (int i = 0; i < args.length; i++) { // parse command line
       boolean moreTokens = (i < args.length - 1);
       if (args[i].equals("create")) {
-        String keyName = "--help";
+        String keyName = "-help";
         if (moreTokens) {
           keyName = args[++i];
         }
 
         command = new CreateCommand(keyName, options);
-        if ("--help".equals(keyName)) {
+        if ("-help".equals(keyName)) {
           printKeyShellUsage();
           return 1;
         }
       } else if (args[i].equals("delete")) {
-        String keyName = "--help";
+        String keyName = "-help";
         if (moreTokens) {
           keyName = args[++i];
         }
 
         command = new DeleteCommand(keyName);
-        if ("--help".equals(keyName)) {
+        if ("-help".equals(keyName)) {
           printKeyShellUsage();
           return 1;
         }
       } else if (args[i].equals("roll")) {
-        String keyName = "--help";
+        String keyName = "-help";
         if (moreTokens) {
           keyName = args[++i];
         }
 
         command = new RollCommand(keyName);
-        if ("--help".equals(keyName)) {
+        if ("-help".equals(keyName)) {
           printKeyShellUsage();
           return 1;
         }
       } else if ("list".equals(args[i])) {
         command = new ListCommand();
-      } else if ("--size".equals(args[i]) && moreTokens) {
+      } else if ("-size".equals(args[i]) && moreTokens) {
         options.setBitLength(Integer.parseInt(args[++i]));
-      } else if ("--cipher".equals(args[i]) && moreTokens) {
+      } else if ("-cipher".equals(args[i]) && moreTokens) {
         options.setCipher(args[++i]);
-      } else if ("--description".equals(args[i]) && moreTokens) {
+      } else if ("-description".equals(args[i]) && moreTokens) {
         options.setDescription(args[++i]);
-      } else if ("--attr".equals(args[i]) && moreTokens) {
+      } else if ("-attr".equals(args[i]) && moreTokens) {
         final String attrval[] = args[++i].split("=", 2);
         final String attr = attrval[0].trim();
         final String val = attrval[1].trim();
@@ -164,14 +164,14 @@ public class KeyShell extends Configured
           return 1;
         }
         attributes.put(attr, val);
-      } else if ("--provider".equals(args[i]) && moreTokens) {
+      } else if ("-provider".equals(args[i]) && moreTokens) {
         userSuppliedProvider = true;
         getConf().set(KeyProviderFactory.KEY_PROVIDER_PATH, args[++i]);
-      } else if ("--metadata".equals(args[i])) {
+      } else if ("-metadata".equals(args[i])) {
         getConf().setBoolean(LIST_METADATA, true);
-      } else if ("-i".equals(args[i]) || ("--interactive".equals(args[i]))) {
+      } else if ("-i".equals(args[i]) || ("-interactive".equals(args[i]))) {
         interactive = true;
-      } else if ("--help".equals(args[i])) {
+      } else if ("-help".equals(args[i])) {
         printKeyShellUsage();
         return 1;
       } else {
@@ -258,11 +258,11 @@ public class KeyShell extends Configured
 
   private class ListCommand extends Command {
     public static final String USAGE =
-        "list [--provider <provider>] [--metadata] [--help]";
+        "list [-provider <provider>] [-metadata] [-help]";
     public static final String DESC =
         "The list subcommand displays the keynames contained within\n" +
         "a particular provider as configured in core-site.xml or\n" +
-        "specified with the --provider argument. --metadata displays\n" +
+        "specified with the -provider argument. -metadata displays\n" +
         "the metadata.";
 
     private boolean metadata = false;
@@ -272,9 +272,9 @@ public class KeyShell extends Configured
       provider = getKeyProvider();
       if (provider == null) {
         out.println("There are no non-transient KeyProviders configured.\n"
-          + "Use the --provider option to specify a provider. If you\n"
+          + "Use the -provider option to specify a provider. If you\n"
           + "want to list a transient provider then you must use the\n"
-          + "--provider argument.");
+          + "-provider argument.");
         rc = false;
       }
       metadata = getConf().getBoolean(LIST_METADATA, false);
@@ -310,10 +310,10 @@ public class KeyShell extends Configured
   }
 
   private class RollCommand extends Command {
-    public static final String USAGE = "roll <keyname> [--provider <provider>] [--help]";
+    public static final String USAGE = "roll <keyname> [-provider <provider>] [-help]";
     public static final String DESC =
       "The roll subcommand creates a new version for the specified key\n" +
-      "within the provider indicated using the --provider argument\n";
+      "within the provider indicated using the -provider argument\n";
 
     String keyName = null;
 
@@ -326,13 +326,13 @@ public class KeyShell extends Configured
       provider = getKeyProvider();
       if (provider == null) {
         out.println("There are no valid KeyProviders configured. The key\n" +
-          "has not been rolled. Use the --provider option to specify\n" +
+          "has not been rolled. Use the -provider option to specify\n" +
           "a provider.");
         rc = false;
       }
       if (keyName == null) {
         out.println("Please provide a <keyname>.\n" +
-          "See the usage description by using --help.");
+          "See the usage description by using -help.");
         rc = false;
       }
       return rc;
@@ -367,11 +367,11 @@ public class KeyShell extends Configured
   }
 
   private class DeleteCommand extends Command {
-    public static final String USAGE = "delete <keyname> [--provider <provider>] [--help]";
+    public static final String USAGE = "delete <keyname> [-provider <provider>] [-help]";
     public static final String DESC =
         "The delete subcommand deletes all versions of the key\n" +
         "specified by the <keyname> argument from within the\n" +
-        "provider specified --provider.";
+        "provider specified -provider.";
 
     String keyName = null;
     boolean cont = true;
@@ -385,12 +385,12 @@ public class KeyShell extends Configured
       provider = getKeyProvider();
       if (provider == null) {
         out.println("There are no valid KeyProviders configured. Nothing\n"
-          + "was deleted. Use the --provider option to specify a provider.");
+          + "was deleted. Use the -provider option to specify a provider.");
         return false;
       }
       if (keyName == null) {
         out.println("There is no keyName specified. Please specify a " +
-            "<keyname>. See the usage description with --help.");
+            "<keyname>. See the usage description with -help.");
         return false;
       }
       if (interactive) {
@@ -436,19 +436,19 @@ public class KeyShell extends Configured
 
   private class CreateCommand extends Command {
     public static final String USAGE =
-      "create <keyname> [--cipher <cipher>] [--size <size>]\n" +
-      "                     [--description <description>]\n" +
-      "                     [--attr <attribute=value>]\n" +
-      "                     [--provider <provider>] [--help]";
+      "create <keyname> [-cipher <cipher>] [-size <size>]\n" +
+      "                     [-description <description>]\n" +
+      "                     [-attr <attribute=value>]\n" +
+      "                     [-provider <provider>] [-help]";
     public static final String DESC =
       "The create subcommand creates a new key for the name specified\n" +
       "by the <keyname> argument within the provider specified by the\n" +
-      "--provider argument. You may specify a cipher with the --cipher\n" +
+      "-provider argument. You may specify a cipher with the -cipher\n" +
       "argument. The default cipher is currently \"AES/CTR/NoPadding\".\n" +
       "The default keysize is 256. You may specify the requested key\n" +
-      "length using the --size argument. Arbitrary attribute=value\n" +
-      "style attributes may be specified using the --attr argument.\n" +
-      "--attr may be specified multiple times, once per attribute.\n";
+      "length using the -size argument. Arbitrary attribute=value\n" +
+      "style attributes may be specified using the -attr argument.\n" +
+      "-attr may be specified multiple times, once per attribute.\n";
 
     final String keyName;
     final Options options;
@@ -463,13 +463,13 @@ public class KeyShell extends Configured
       provider = getKeyProvider();
       if (provider == null) {
         out.println("There are no valid KeyProviders configured. No key\n" +
-          " was created. You can use the --provider option to specify\n" +
+          " was created. You can use the -provider option to specify\n" +
           " a provider to use.");
         rc = false;
       }
       if (keyName == null) {
         out.println("Please provide a <keyname>. See the usage description" +
-          " with --help.");
+          " with -help.");
         rc = false;
       }
       return rc;

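With the switch to single-dash options above, driving KeyShell programmatically (the same way its own main() does, via ToolRunner) looks roughly like this; the driver class, key name, and provider URI are illustrative, not part of this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.key.KeyShell;
    import org.apache.hadoop.util.ToolRunner;

    public class KeyShellExample {                       // hypothetical driver
      public static void main(String[] args) throws Exception {
        // Single-dash argument style as accepted after this change.
        int rc = ToolRunner.run(new Configuration(), new KeyShell(), new String[] {
            "create", "mykey",
            "-size", "256",
            "-provider", "jceks://file/tmp/example.jceks" });
        System.exit(rc);
      }
    }
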
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java Tue Aug  5 02:30:54 2014
@@ -653,7 +653,7 @@ public class KMSClientProvider extends K
         encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
             .equals(KeyProviderCryptoExtension.EEK),
         "encryptedKey version name must be '%s', is '%s'",
-        KeyProviderCryptoExtension.EK,
+        KeyProviderCryptoExtension.EEK,
         encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
     );
     checkNotNull(encryptedKeyVersion.getEncryptedKeyVersion(), "encryptedKey");

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java Tue Aug  5 02:30:54 2014
@@ -43,6 +43,7 @@ import org.apache.hadoop.fs.Options.Crea
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.InvalidPathException;
 import org.apache.hadoop.security.AccessControlException;
@@ -805,6 +806,18 @@ public abstract class AbstractFileSystem
 
   /**
    * The specification of this method matches that of
+   * {@link FileContext#access(Path, FsAction)}
+   * except that an UnresolvedLinkException may be thrown if a symlink is
+   * encountered in the path.
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException {
+    FileSystem.checkAccessPermissions(this.getFileStatus(path), mode);
+  }
+
+  /**
+   * The specification of this method matches that of
    * {@link FileContext#getFileLinkStatus(Path)}
    * except that an UnresolvedLinkException may be thrown if a symlink is  
    * encountered in the path leading up to the final path component.

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java Tue Aug  5 02:30:54 2014
@@ -44,6 +44,7 @@ import org.apache.hadoop.fs.FileSystem.S
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_DEFAULT;
@@ -1109,6 +1110,55 @@ public final class FileContext {
   }
 
   /**
+   * Checks if the user can access a path.  The mode specifies which access
+   * checks to perform.  If the requested permissions are granted, then the
+   * method returns normally.  If access is denied, then the method throws an
+   * {@link AccessControlException}.
+   * <p/>
+   * The default implementation of this method calls {@link #getFileStatus(Path)}
+   * and checks the returned permissions against the requested permissions.
+   * Note that the getFileStatus call will be subject to authorization checks.
+   * Typically, this requires search (execute) permissions on each directory in
+   * the path's prefix, but this is implementation-defined.  Any file system
+   * that provides a richer authorization model (such as ACLs) may override the
+   * default implementation so that it checks against that model instead.
+   * <p>
+   * In general, applications should avoid using this method, due to the risk of
+   * time-of-check/time-of-use race conditions.  The permissions on a file may
+   * change immediately after the access call returns.  Most applications should
+   * prefer running specific file system actions as the desired user represented
+   * by a {@link UserGroupInformation}.
+   *
+   * @param path Path to check
+   * @param mode type of access to check
+   * @throws AccessControlException if access is denied
+   * @throws FileNotFoundException if the path does not exist
+   * @throws UnsupportedFileSystemException if file system for <code>path</code>
+   *   is not supported
+   * @throws IOException see specific implementation
+   * 
+   * Exceptions applicable to file systems accessed over RPC:
+   * @throws RpcClientException If an exception occurred in the RPC client
+   * @throws RpcServerException If an exception occurred in the RPC server
+   * @throws UnexpectedServerException If server implementation throws 
+   *           undeclared exception to RPC server
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
+  public void access(final Path path, final FsAction mode)
+      throws AccessControlException, FileNotFoundException,
+      UnsupportedFileSystemException, IOException {
+    final Path absPath = fixRelativePart(path);
+    new FSLinkResolver<Void>() {
+      @Override
+      public Void next(AbstractFileSystem fs, Path p) throws IOException,
+          UnresolvedLinkException {
+        fs.access(p, mode);
+        return null;
+      }
+    }.resolve(this, absPath);
+  }
+
+  /**
    * Return a file status object that represents the path. If the path 
    * refers to a symlink then the FileStatus of the symlink is returned.
    * The behavior is equivalent to #getFileStatus() if the underlying

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Tue Aug  5 02:30:54 2014
@@ -25,6 +25,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -50,6 +51,7 @@ import org.apache.hadoop.fs.Options.Chec
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.MultipleIOException;
 import org.apache.hadoop.io.Text;
@@ -2073,6 +2075,71 @@ public abstract class FileSystem extends
   public abstract FileStatus getFileStatus(Path f) throws IOException;
 
   /**
+   * Checks if the user can access a path.  The mode specifies which access
+   * checks to perform.  If the requested permissions are granted, then the
+   * method returns normally.  If access is denied, then the method throws an
+   * {@link AccessControlException}.
+   * <p/>
+   * The default implementation of this method calls {@link #getFileStatus(Path)}
+   * and checks the returned permissions against the requested permissions.
+   * Note that the getFileStatus call will be subject to authorization checks.
+   * Typically, this requires search (execute) permissions on each directory in
+   * the path's prefix, but this is implementation-defined.  Any file system
+   * that provides a richer authorization model (such as ACLs) may override the
+   * default implementation so that it checks against that model instead.
+   * <p>
+   * In general, applications should avoid using this method, due to the risk of
+   * time-of-check/time-of-use race conditions.  The permissions on a file may
+   * change immediately after the access call returns.  Most applications should
+   * prefer running specific file system actions as the desired user represented
+   * by a {@link UserGroupInformation}.
+   *
+   * @param path Path to check
+   * @param mode type of access to check
+   * @throws AccessControlException if access is denied
+   * @throws FileNotFoundException if the path does not exist
+   * @throws IOException see specific implementation
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, IOException {
+    checkAccessPermissions(this.getFileStatus(path), mode);
+  }
+
+  /**
+   * This method provides the default implementation of
+   * {@link #access(Path, FsAction)}.
+   *
+   * @param stat FileStatus to check
+   * @param mode type of access to check
+   * @throws IOException for any error
+   */
+  @InterfaceAudience.Private
+  static void checkAccessPermissions(FileStatus stat, FsAction mode)
+      throws IOException {
+    FsPermission perm = stat.getPermission();
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    String user = ugi.getShortUserName();
+    List<String> groups = Arrays.asList(ugi.getGroupNames());
+    if (user.equals(stat.getOwner())) {
+      if (perm.getUserAction().implies(mode)) {
+        return;
+      }
+    } else if (groups.contains(stat.getGroup())) {
+      if (perm.getGroupAction().implies(mode)) {
+        return;
+      }
+    } else {
+      if (perm.getOtherAction().implies(mode)) {
+        return;
+      }
+    }
+    throw new AccessControlException(String.format(
+      "Permission denied: user=%s, path=\"%s\":%s:%s:%s%s", user, 
stat.getPath(),
+      stat.getOwner(), stat.getGroup(), stat.isDirectory() ? "d" : "-", perm));
+  }
+
+  /**
    * See {@link FileContext#fixRelativePart}
    */
   protected Path fixRelativePart(Path p) {

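The new access() API above is limited-private (HDFS, Hive), but its intended use is easy to sketch; the caller class and path below are illustrative, and the TOCTOU caveat from the javadoc still applies:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.security.AccessControlException;

    public class AccessCheckExample {                    // hypothetical caller
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        try {
          // Throws AccessControlException if the current user lacks read+write.
          fs.access(new Path("/tmp/data"), FsAction.READ_WRITE);
        } catch (AccessControlException ace) {
          System.err.println("Access denied: " + ace.getMessage());
        }
      }
    }
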
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java Tue Aug  5 02:30:54 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.security.AccessControlException;
@@ -397,6 +398,12 @@ public class FilterFileSystem extends Fi
     return fs.getFileStatus(f);
   }
 
+  @Override
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, IOException {
+    fs.access(path, mode);
+  }
+
   public void createSymlink(final Path target, final Path link,
       final boolean createParent) throws AccessControlException,
       FileAlreadyExistsException, FileNotFoundException,

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java Tue Aug  5 02:30:54 2014
@@ -29,6 +29,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.security.AccessControlException;
@@ -120,6 +121,13 @@ public abstract class FilterFs extends A
   }
 
   @Override
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException {
+    checkPath(path);
+    myFs.access(path, mode);
+  }
+
+  @Override
   public FileStatus getFileLinkStatus(final Path f) 
     throws IOException, UnresolvedLinkException {
     checkPath(f);

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java Tue Aug  5 02:30:54 2014
@@ -118,7 +118,11 @@ class Delete {
         } catch(FileNotFoundException fnfe) {
           throw fnfe;
         } catch (IOException ioe) {
-            throw new IOException(ioe.getMessage() + ". Consider using -skipTrash option", ioe);
+          String msg = ioe.getMessage();
+          if (ioe.getCause() != null) {
+            msg += ": " + ioe.getCause().getMessage();
+         }
+          throw new IOException(msg + ". Consider using -skipTrash option", ioe);
         }
       }
       return success;

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java Tue Aug  5 02:30:54 2014
@@ -41,7 +41,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.Progressable;
 
 /**
@@ -223,6 +225,12 @@ class ChRootedFileSystem extends FilterF
   }
 
   @Override
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, IOException {
+    super.access(fullPath(path), mode);
+  }
+
+  @Override
   public FsStatus getStatus(Path p) throws IOException {
     return super.getStatus(fullPath(p));
   }

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java Tue Aug  5 02:30:54 2014
@@ -41,7 +41,9 @@ import org.apache.hadoop.fs.UnresolvedLi
 import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 
@@ -200,6 +202,11 @@ class ChRootedFs extends AbstractFileSys
     return myFs.getFileStatus(fullPath(f));
   }
 
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException {
+    myFs.access(fullPath(path), mode);
+  }
+
   @Override
   public FileStatus getFileLinkStatus(final Path f) 
     throws IOException, UnresolvedLinkException {

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Tue Aug  5 02:30:54 2014
@@ -51,6 +51,7 @@ import org.apache.hadoop.fs.XAttrSetFlag
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.AclUtil;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.viewfs.InodeTree.INode;
 import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink;
@@ -359,7 +360,14 @@ public class ViewFileSystem extends File
     return new ViewFsFileStatus(status, this.makeQualified(f));
   }
   
-  
+  @Override
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, IOException {
+    InodeTree.ResolveResult<FileSystem> res =
+      fsState.resolve(getUriPath(path), true);
+    res.targetFileSystem.access(res.remainingPath, mode);
+  }
+
   @Override
   public FileStatus[] listStatus(final Path f) throws AccessControlException,
       FileNotFoundException, IOException {

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Tue Aug  5 02:30:54 2014
@@ -54,6 +54,7 @@ import org.apache.hadoop.fs.local.LocalC
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclUtil;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.viewfs.InodeTree.INode;
 import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink;
@@ -353,6 +354,14 @@ public class ViewFs extends AbstractFile
   }
 
   @Override
+  public void access(Path path, FsAction mode) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException {
+    InodeTree.ResolveResult<AbstractFileSystem> res =
+      fsState.resolve(getUriPath(path), true);
+    res.targetFileSystem.access(res.remainingPath, mode);
+  }
+
+  @Override
   public FileStatus getFileLinkStatus(final Path f)
      throws AccessControlException, FileNotFoundException,
      UnsupportedFileSystemException, IOException {

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Tue Aug  5 02:30:54 2014
@@ -33,6 +33,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.HardLink;
 import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.Shell;
@@ -823,6 +824,14 @@ public class NativeIO {
     }
   }
 
+  public static void link(File src, File dst) throws IOException {
+    if (!nativeLoaded) {
+      HardLink.createHardLink(src, dst);
+    } else {
+      link0(src.getAbsolutePath(), dst.getAbsolutePath());
+    }
+  }
+
   /**
    * A version of renameTo that throws a descriptive exception when it fails.
    *
@@ -833,4 +842,7 @@ public class NativeIO {
    */
   private static native void renameTo0(String src, String dst)
       throws NativeIOException;
+
+  private static native void link0(String src, String dst)
+      throws NativeIOException;
 }

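The new NativeIO.link() helper above uses the native link0() when libhadoop is loaded and otherwise falls back to HardLink.createHardLink(). Calling it is straightforward; the caller class and paths are illustrative:

    import java.io.File;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class HardLinkExample {                       // hypothetical caller
      public static void main(String[] args) throws Exception {
        // Creates /tmp/link.dat as a hard link to /tmp/source.dat, via the
        // native implementation when available, otherwise the Java fallback.
        NativeIO.link(new File("/tmp/source.dat"), new File("/tmp/link.dat"));
      }
    }
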
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Tue Aug  5 02:30:54 2014
@@ -77,7 +77,8 @@ public class SecurityUtil {
    * For use only by tests and initialization
    */
   @InterfaceAudience.Private
-  static void setTokenServiceUseIp(boolean flag) {
+  @VisibleForTesting
+  public static void setTokenServiceUseIp(boolean flag) {
     useIpForTokenService = flag;
     hostResolver = !useIpForTokenService
         ? new QualifiedHostResolver()

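With the method now public and annotated @VisibleForTesting, a test outside the package can toggle IP-versus-hostname resolution for token services directly. A hedged sketch follows; the test class is hypothetical and restoring a default of true is an assumption.

// Illustrative test-only sketch; not part of this change set.
import org.apache.hadoop.security.SecurityUtil;
import org.junit.After;
import org.junit.Test;

public class TokenServiceResolutionTest {
  @Test
  public void buildsHostnameBasedTokenService() {
    SecurityUtil.setTokenServiceUseIp(false);   // resolve token services by hostname
    // ... exercise code that constructs token service addresses ...
  }

  @After
  public void restoreDefault() {
    SecurityUtil.setTokenServiceUseIp(true);    // assumed default; adjust if configured otherwise
  }
}
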
Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
 Tue Aug  5 02:30:54 2014
@@ -67,11 +67,11 @@ public class CredentialShell extends Con
       if (command.validate()) {
           command.execute();
       } else {
-        exitCode = -1;
+        exitCode = 1;
       }
     } catch (Exception e) {
       e.printStackTrace(err);
-      return -1;
+      return 1;
     }
     return exitCode;
   }
@@ -79,47 +79,54 @@ public class CredentialShell extends Con
   /**
    * Parse the command line arguments and initialize the data
    * <pre>
-   * % hadoop alias create alias [--provider providerPath]
-   * % hadoop alias list [-provider providerPath]
-   * % hadoop alias delete alias [--provider providerPath] [-i]
+   * % hadoop credential create alias [-provider providerPath]
+   * % hadoop credential list [-provider providerPath]
+   * % hadoop credential delete alias [-provider providerPath] [-i]
    * </pre>
    * @param args
-   * @return
+   * @return 0 if the argument(s) were recognized, 1 otherwise
    * @throws IOException
    */
-  private int init(String[] args) throws IOException {
+  protected int init(String[] args) throws IOException {
+    // no args should print the help message
+    if (0 == args.length) {
+      printCredShellUsage();
+      ToolRunner.printGenericCommandUsage(System.err);
+      return 1;
+    }
+
     for (int i = 0; i < args.length; i++) { // parse command line
       if (args[i].equals("create")) {
         String alias = args[++i];
         command = new CreateCommand(alias);
-        if (alias.equals("--help")) {
+        if (alias.equals("-help")) {
           printCredShellUsage();
-          return -1;
+          return 0;
         }
       } else if (args[i].equals("delete")) {
         String alias = args[++i];
         command = new DeleteCommand(alias);
-        if (alias.equals("--help")) {
+        if (alias.equals("-help")) {
           printCredShellUsage();
-          return -1;
+          return 0;
         }
       } else if (args[i].equals("list")) {
         command = new ListCommand();
-      } else if (args[i].equals("--provider")) {
+      } else if (args[i].equals("-provider")) {
         userSuppliedProvider = true;
         getConf().set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, 
             args[++i]);
-      } else if (args[i].equals("-i") || (args[i].equals("--interactive"))) {
+      } else if (args[i].equals("-i") || (args[i].equals("-interactive"))) {
         interactive = true;
-      } else if (args[i].equals("-v") || (args[i].equals("--value"))) {
+      } else if (args[i].equals("-v") || (args[i].equals("-value"))) {
         value = args[++i];
-      } else if (args[i].equals("--help")) {
+      } else if (args[i].equals("-help")) {
         printCredShellUsage();
-        return -1;
+        return 0;
       } else {
         printCredShellUsage();
         ToolRunner.printGenericCommandUsage(System.err);
-        return -1;
+        return 1;
       }
     }
     return 0;
@@ -188,20 +195,20 @@ public class CredentialShell extends Con
   }
 
   private class ListCommand extends Command {
-    public static final String USAGE = "list <alias> [--provider] [--help]";
+    public static final String USAGE = "list [-provider] [-help]";
     public static final String DESC =
         "The list subcommand displays the aliases contained within \n" +
         "a particular provider - as configured in core-site.xml or " +
-        "indicated\nthrough the --provider argument.";
+        "indicated\nthrough the -provider argument.";
 
     public boolean validate() {
       boolean rc = true;
       provider = getCredentialProvider();
       if (provider == null) {
        out.println("There are no non-transient CredentialProviders configured.\n"
-            + "Consider using the --provider option to indicate the provider\n"
+            + "Consider using the -provider option to indicate the provider\n"
             + "to use. If you want to list a transient provider then you\n"
-            + "you MUST use the --provider argument.");
+            + "you MUST use the -provider argument.");
         rc = false;
       }
       return rc;
@@ -229,11 +236,11 @@ public class CredentialShell extends Con
   }
 
   private class DeleteCommand extends Command {
-    public static final String USAGE = "delete <alias> [--provider] [--help]";
+    public static final String USAGE = "delete <alias> [-provider] [-help]";
     public static final String DESC =
         "The delete subcommand deletes the credenital\n" +
         "specified as the <alias> argument from within the provider\n" +
-        "indicated through the --provider argument";
+        "indicated through the -provider argument";
 
     String alias = null;
     boolean cont = true;
@@ -248,13 +255,13 @@ public class CredentialShell extends Con
       if (provider == null) {
         out.println("There are no valid CredentialProviders configured.\n"
             + "Nothing will be deleted.\n"
-            + "Consider using the --provider option to indicate the provider"
+            + "Consider using the -provider option to indicate the provider"
             + " to use.");
         return false;
       }
       if (alias == null) {
         out.println("There is no alias specified. Please provide the" +
-            "mandatory <alias>. See the usage description with --help.");
+            "mandatory <alias>. See the usage description with -help.");
         return false;
       }
       if (interactive) {
@@ -299,11 +306,11 @@ public class CredentialShell extends Con
   }
 
   private class CreateCommand extends Command {
-    public static final String USAGE = "create <alias> [--provider] [--help]";
+    public static final String USAGE = "create <alias> [-provider] [-help]";
    public static final String DESC =
        "The create subcommand creates a new credential for the name specified\n" +
         "as the <alias> argument within the provider indicated through\n" +
-        "the --provider argument.";
+        "the -provider argument.";
 
     String alias = null;
 
@@ -317,13 +324,13 @@ public class CredentialShell extends Con
       if (provider == null) {
         out.println("There are no valid CredentialProviders configured." +
                        "\nCredential will not be created.\n"
-            + "Consider using the --provider option to indicate the provider" +
+            + "Consider using the -provider option to indicate the provider" +
             " to use.");
         rc = false;
       }
       if (alias == null) {
         out.println("There is no alias specified. Please provide the" +
-                       "mandatory <alias>. See the usage description with --help.");
+            "mandatory <alias>. See the usage description with -help.");
         rc = false;
       }
       return rc;

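A minimal sketch of driving the reworked shell programmatically with the single-dash options it now accepts, in the same style as the TestCredShell cases later in this mail; the alias, value, and keystore path are made up.

// Illustrative sketch only: non-interactive credential creation.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialShell;

public class CredShellExample {
  public static void main(String[] args) throws Exception {
    CredentialShell cs = new CredentialShell();
    cs.setConf(new Configuration());
    int rc = cs.run(new String[] {"create", "db.password", "-value", "s3cret",
        "-provider", "jceks://file/tmp/credstore.jceks"});   // hypothetical keystore
    System.out.println("exit code: " + rc);   // 0 on success, 1 on error after this change
  }
}
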
Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 Tue Aug  5 02:30:54 2014
@@ -78,6 +78,20 @@ public class DiskChecker {
            (mkdirsWithExistsCheck(new File(parent)) &&
                                       (canonDir.mkdir() || canonDir.exists()));
   }
+
+  /**
+   * Recurse down a directory tree, checking all child directories.
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirs(File dir) throws DiskErrorException {
+    checkDir(dir);
+    for (File child : dir.listFiles()) {
+      if (child.isDirectory()) {
+        checkDirs(child);
+      }
+    }
+  }
   
   /**
    * Create the directory if it doesn't exist and check that dir is readable,

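A minimal sketch of calling the new recursive check; the directory is made up. Note that checkDirs() as added above walks dir.listFiles() directly, so it assumes the directory stays listable while it is traversed.

// Illustrative sketch only: validate a local storage directory tree.
import java.io.File;

import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

public class DirHealthCheck {
  public static void main(String[] args) {
    File root = new File("/data/hadoop/local");   // hypothetical storage root
    try {
      DiskChecker.checkDirs(root);                // checks root plus every child directory
      System.out.println(root + " and its subdirectories look healthy");
    } catch (DiskErrorException dee) {
      System.err.println("disk check failed: " + dee.getMessage());
    }
  }
}
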
Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
 Tue Aug  5 02:30:54 2014
@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Date;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
@@ -377,6 +378,19 @@ public class StringUtils {
     return str.trim().split("\\s*,\\s*");
   }
 
+  /**
+   * Trims all the strings in a Collection<String> and returns a Set<String>.
+   * @param strings
+   * @return
+   */
+  public static Set<String> getTrimmedStrings(Collection<String> strings) {
+    Set<String> trimmedStrings = new HashSet<String>();
+    for (String string: strings) {
+      trimmedStrings.add(string.trim());
+    }
+    return trimmedStrings;
+  }
+
   final public static String[] emptyStringArray = {};
   final public static char COMMA = ',';
   final public static String COMMA_STR = ",";

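A small illustration of the new collection-based overload; the inputs are invented. Because the result is backed by a HashSet, entries that differ only in surrounding whitespace collapse to one element and iteration order is not preserved.

// Illustrative sketch only.
import java.util.Arrays;
import java.util.Set;

import org.apache.hadoop.util.StringUtils;

public class TrimExample {
  public static void main(String[] args) {
    Set<String> hosts = StringUtils.getTrimmedStrings(
        Arrays.asList(" host1 ", "host2", "host1"));
    System.out.println(hosts);   // e.g. [host1, host2] -- order not guaranteed
  }
}
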
Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
 Tue Aug  5 02:30:54 2014
@@ -1054,6 +1054,43 @@ done:
 #endif
 }
 
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_link0(JNIEnv *env,
+jclass clazz, jstring jsrc, jstring jdst)
+{
+#ifdef UNIX
+  const char *src = NULL, *dst = NULL;
+
+  src = (*env)->GetStringUTFChars(env, jsrc, NULL);
+  if (!src) goto done; // exception was thrown
+  dst = (*env)->GetStringUTFChars(env, jdst, NULL);
+  if (!dst) goto done; // exception was thrown
+  if (link(src, dst)) {
+    throw_ioe(env, errno);
+  }
+
+done:
+  if (src) (*env)->ReleaseStringUTFChars(env, jsrc, src);
+  if (dst) (*env)->ReleaseStringUTFChars(env, jdst, dst);
+#endif
+
+#ifdef WINDOWS
+  LPCTSTR src = NULL, dst = NULL;
+
+  src = (LPCTSTR) (*env)->GetStringChars(env, jsrc, NULL);
+  if (!src) goto done; // exception was thrown
+  dst = (LPCTSTR) (*env)->GetStringChars(env, jdst, NULL);
+  if (!dst) goto done; // exception was thrown
+  if (!CreateHardLink(dst, src, NULL)) {
+    throw_ioe(env, GetLastError());
+  }
+
+done:
+  if (src) (*env)->ReleaseStringChars(env, jsrc, src);
+  if (dst) (*env)->ReleaseStringChars(env, jdst, dst);
+#endif
+}
+
 JNIEXPORT jlong JNICALL
 Java_org_apache_hadoop_io_nativeio_NativeIO_getMemlockLimit0(
 JNIEnv *env, jclass clazz)

Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
 Tue Aug  5 02:30:54 2014
@@ -296,9 +296,24 @@ User Commands
 * <<<classpath>>>
 
    Prints the class path needed to get the Hadoop jar and the required
-   libraries.
+   libraries.  If called without arguments, then prints the classpath set up by
+   the command scripts, which is likely to contain wildcards in the classpath
+   entries.  Additional options print the classpath after wildcard expansion or
+   write the classpath into the manifest of a jar file.  The latter is useful in
+   environments where wildcards cannot be used and the expanded classpath exceeds
+   the maximum supported command line length.
 
-   Usage: <<<hadoop classpath>>>
+   Usage: <<<hadoop classpath [--glob|--jar <path>|-h|--help]>>>
+
+*-----------------+-----------------------------------------------------------+
+|| COMMAND_OPTION || Description
+*-----------------+-----------------------------------------------------------+
+| --glob          | expand wildcards
+*-----------------+-----------------------------------------------------------+
+| --jar <path>    | write classpath as manifest in jar named <path>
+*-----------------+-----------------------------------------------------------+
+| -h, --help      | print help
+*-----------------+-----------------------------------------------------------+
 
 Administration Commands
 

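This is not the hadoop classpath implementation itself, just a hedged JDK-level sketch of the technique the --jar option describes: packing a long classpath into a jar manifest's Class-Path attribute so the command line stays short. File names and classpath entries are made up.

// Illustrative sketch only: write a classpath into a jar manifest.
import java.io.FileOutputStream;
import java.util.jar.Attributes;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;

public class ManifestClasspathExample {
  public static void main(String[] args) throws Exception {
    Manifest manifest = new Manifest();
    Attributes attrs = manifest.getMainAttributes();
    attrs.put(Attributes.Name.MANIFEST_VERSION, "1.0");
    // Class-Path entries are space-separated URLs, typically relative to the jar.
    attrs.put(Attributes.Name.CLASS_PATH, "lib/a.jar lib/b.jar lib/c.jar");
    JarOutputStream jos =
        new JarOutputStream(new FileOutputStream("/tmp/classpath.jar"), manifest);
    jos.close();   // the manifest alone carries the classpath; no entries are needed
  }
}
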
Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java
 Tue Aug  5 02:30:54 2014
@@ -26,10 +26,10 @@ import javax.crypto.spec.IvParameterSpec
 import javax.crypto.spec.SecretKeySpec;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-
 import static org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -118,8 +118,15 @@ public class TestKeyProviderCryptoExtens
         new IvParameterSpec(KeyProviderCryptoExtension.EncryptedKeyVersion
             .deriveIV(encryptedKeyIv)));
     final byte[] manualMaterial = cipher.doFinal(encryptedKeyMaterial);
+
+    // Test the createForDecryption factory method
+    EncryptedKeyVersion eek2 =
+        EncryptedKeyVersion.createForDecryption(
+            eek.getEncryptionKeyVersionName(), eek.getEncryptedKeyIv(),
+            eek.getEncryptedKeyVersion().getMaterial());
+
     // Decrypt it with the API
-    KeyVersion decryptedKey = kpExt.decryptEncryptedKey(eek);
+    KeyVersion decryptedKey = kpExt.decryptEncryptedKey(eek2);
     final byte[] apiMaterial = decryptedKey.getMaterial();
 
     assertArrayEquals("Wrong key material from decryptEncryptedKey",

Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
 Tue Aug  5 02:30:54 2014
@@ -73,7 +73,7 @@ public class TestKeyShell {
   private void deleteKey(KeyShell ks, String keyName) throws Exception {
     int rc;
     outContent.reset();
-    final String[] delArgs = {"delete", keyName, "--provider", jceksProvider};
+    final String[] delArgs = {"delete", keyName, "-provider", jceksProvider};
     rc = ks.run(delArgs);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains(keyName + " has been " +
@@ -90,8 +90,8 @@ public class TestKeyShell {
   private String listKeys(KeyShell ks, boolean wantMetadata) throws Exception {
     int rc;
     outContent.reset();
-    final String[] listArgs = {"list", "--provider", jceksProvider };
-    final String[] listArgsM = {"list", "--metadata", "--provider", jceksProvider };
+    final String[] listArgs = {"list", "-provider", jceksProvider };
+    final String[] listArgsM = {"list", "-metadata", "-provider", jceksProvider };
     rc = ks.run(wantMetadata ? listArgsM : listArgs);
     assertEquals(0, rc);
     return outContent.toString();
@@ -106,7 +106,7 @@ public class TestKeyShell {
     ks.setConf(new Configuration());
 
     outContent.reset();
-    final String[] args1 = {"create", keyName, "--provider", jceksProvider};
+    final String[] args1 = {"create", keyName, "-provider", jceksProvider};
     rc = ks.run(args1);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains(keyName + " has been " +
@@ -121,7 +121,7 @@ public class TestKeyShell {
     assertTrue(listOut.contains("created"));
 
     outContent.reset();
-    final String[] args2 = {"roll", keyName, "--provider", jceksProvider};
+    final String[] args2 = {"roll", keyName, "-provider", jceksProvider};
     rc = ks.run(args2);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("key1 has been successfully " +
@@ -137,8 +137,8 @@ public class TestKeyShell {
   @Test
   public void testKeySuccessfulCreationWithDescription() throws Exception {
     outContent.reset();
-    final String[] args1 = {"create", "key1", "--provider", jceksProvider,
-                      "--description", "someDescription"};
+    final String[] args1 = {"create", "key1", "-provider", jceksProvider,
+                      "-description", "someDescription"};
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
@@ -154,7 +154,7 @@ public class TestKeyShell {
 
   @Test
   public void testInvalidKeySize() throws Exception {
-    final String[] args1 = {"create", "key1", "--size", "56", "--provider",
+    final String[] args1 = {"create", "key1", "-size", "56", "-provider",
             jceksProvider};
 
     int rc = 0;
@@ -167,7 +167,7 @@ public class TestKeyShell {
 
   @Test
   public void testInvalidCipher() throws Exception {
-    final String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider",
+    final String[] args1 = {"create", "key1", "-cipher", "LJM", "-provider",
             jceksProvider};
 
     int rc = 0;
@@ -180,7 +180,7 @@ public class TestKeyShell {
 
   @Test
   public void testInvalidProvider() throws Exception {
-    final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
+    final String[] args1 = {"create", "key1", "-cipher", "AES", "-provider",
       "sdff://file/tmp/keystore.jceks"};
     
     int rc = 0;
@@ -194,7 +194,7 @@ public class TestKeyShell {
 
   @Test
   public void testTransientProviderWarning() throws Exception {
-    final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
+    final String[] args1 = {"create", "key1", "-cipher", "AES", "-provider",
       "user:///"};
     
     int rc = 0;
@@ -224,8 +224,8 @@ public class TestKeyShell {
   @Test
   public void testFullCipher() throws Exception {
     final String keyName = "key1";
-    final String[] args1 = {"create", keyName, "--cipher", "AES/CBC/pkcs5Padding",
-        "--provider", jceksProvider};
+    final String[] args1 = {"create", keyName, "-cipher", "AES/CBC/pkcs5Padding",
+        "-provider", jceksProvider};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
@@ -245,8 +245,8 @@ public class TestKeyShell {
     ks.setConf(new Configuration());
 
     /* Simple creation test */
-    final String[] args1 = {"create", "keyattr1", "--provider", jceksProvider,
-            "--attr", "foo=bar"};
+    final String[] args1 = {"create", "keyattr1", "-provider", jceksProvider,
+            "-attr", "foo=bar"};
     rc = ks.run(args1);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("keyattr1 has been " +
@@ -259,8 +259,8 @@ public class TestKeyShell {
 
     /* Negative tests: no attribute */
     outContent.reset();
-    final String[] args2 = {"create", "keyattr2", "--provider", jceksProvider,
-            "--attr", "=bar"};
+    final String[] args2 = {"create", "keyattr2", "-provider", jceksProvider,
+            "-attr", "=bar"};
     rc = ks.run(args2);
     assertEquals(1, rc);
 
@@ -288,10 +288,10 @@ public class TestKeyShell {
 
     /* Test several attrs together... */
     outContent.reset();
-    final String[] args3 = {"create", "keyattr3", "--provider", jceksProvider,
-            "--attr", "foo = bar",
-            "--attr", " glarch =baz  ",
-            "--attr", "abc=def"};
+    final String[] args3 = {"create", "keyattr3", "-provider", jceksProvider,
+            "-attr", "foo = bar",
+            "-attr", " glarch =baz  ",
+            "-attr", "abc=def"};
     rc = ks.run(args3);
     assertEquals(0, rc);
 
@@ -304,9 +304,9 @@ public class TestKeyShell {
 
     /* Negative test - repeated attributes should fail */
     outContent.reset();
-    final String[] args4 = {"create", "keyattr4", "--provider", jceksProvider,
-            "--attr", "foo=bar",
-            "--attr", "foo=glarch"};
+    final String[] args4 = {"create", "keyattr4", "-provider", jceksProvider,
+            "-attr", "foo=bar",
+            "-attr", "foo=glarch"};
     rc = ks.run(args4);
     assertEquals(1, rc);
 

Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
 Tue Aug  5 02:30:54 2014
@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
@@ -201,6 +202,8 @@ public class TestHarFileSystem {
     public void removeXAttr(Path path, String name) throws IOException;
 
     public AclStatus getAclStatus(Path path) throws IOException;
+
+    public void access(Path path, FsAction mode) throws IOException;
   }
 
   @Test

Modified: 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java?rev=1615844&r1=1615843&r2=1615844&view=diff
==============================================================================
--- 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
 (original)
+++ 
hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
 Tue Aug  5 02:30:54 2014
@@ -17,16 +17,18 @@
  */
 package org.apache.hadoop.security.alias;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.PrintStream;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.alias.CredentialShell.PasswordReader;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -45,7 +47,7 @@ public class TestCredShell {
   @Test
   public void testCredentialSuccessfulLifecycle() throws Exception {
     outContent.reset();
-    String[] args1 = {"create", "credential1", "--value", "p@ssw0rd", "--provider",
+    String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
         "jceks://file" + tmpDir + "/credstore.jceks"};
     int rc = 0;
     CredentialShell cs = new CredentialShell();
@@ -56,14 +58,14 @@ public class TestCredShell {
                "created."));
 
     outContent.reset();
-    String[] args2 = {"list", "--provider", 
+    String[] args2 = {"list", "-provider",
         "jceks://file" + tmpDir + "/credstore.jceks"};
     rc = cs.run(args2);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("credential1"));
 
     outContent.reset();
-    String[] args4 = {"delete", "credential1", "--provider", 
+    String[] args4 = {"delete", "credential1", "-provider",
         "jceks://file" + tmpDir + "/credstore.jceks"};
     rc = cs.run(args4);
     assertEquals(0, rc);
@@ -71,7 +73,7 @@ public class TestCredShell {
                "deleted."));
 
     outContent.reset();
-    String[] args5 = {"list", "--provider", 
+    String[] args5 = {"list", "-provider",
         "jceks://file" + tmpDir + "/credstore.jceks"};
     rc = cs.run(args5);
     assertEquals(0, rc);
@@ -80,21 +82,21 @@ public class TestCredShell {
 
   @Test
   public void testInvalidProvider() throws Exception {
-    String[] args1 = {"create", "credential1", "--value", "p@ssw0rd", "--provider",
+    String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
       "sdff://file/tmp/credstore.jceks"};
     
     int rc = 0;
     CredentialShell cs = new CredentialShell();
     cs.setConf(new Configuration());
     rc = cs.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("There are no valid " +
                "CredentialProviders configured."));
   }
 
   @Test
   public void testTransientProviderWarning() throws Exception {
-    String[] args1 = {"create", "credential1", "--value", "p@ssw0rd", "--provider",
+    String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
       "user:///"};
     
     int rc = 0;
@@ -105,7 +107,7 @@ public class TestCredShell {
     assertTrue(outContent.toString().contains("WARNING: you are modifying a " +
                "transient provider."));
 
-    String[] args2 = {"delete", "credential1", "--provider", "user:///"};
+    String[] args2 = {"delete", "credential1", "-provider", "user:///"};
     rc = cs.run(args2);
     assertEquals(outContent.toString(), 0, rc);
     assertTrue(outContent.toString().contains("credential1 has been successfully " +
@@ -122,14 +124,14 @@ public class TestCredShell {
     config.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, "user:///");
     cs.setConf(config);
     rc = cs.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("There are no valid " +
                "CredentialProviders configured."));
   }
   
   @Test
   public void testPromptForCredentialWithEmptyPasswd() throws Exception {
-    String[] args1 = {"create", "credential1", "--provider", 
+    String[] args1 = {"create", "credential1", "-provider",
         "jceks://file" + tmpDir + "/credstore.jceks"};
     ArrayList<String> passwords = new ArrayList<String>();
     passwords.add(null);
@@ -139,13 +141,13 @@ public class TestCredShell {
     shell.setConf(new Configuration());
     shell.setPasswordReader(new MockPasswordReader(passwords));
     rc = shell.run(args1);
-    assertEquals(outContent.toString(), -1, rc);
+    assertEquals(outContent.toString(), 1, rc);
     assertTrue(outContent.toString().contains("Passwords don't match"));
   }
 
   @Test
   public void testPromptForCredential() throws Exception {
-    String[] args1 = {"create", "credential1", "--provider", 
+    String[] args1 = {"create", "credential1", "-provider",
         "jceks://file" + tmpDir + "/credstore.jceks"};
     ArrayList<String> passwords = new ArrayList<String>();
     passwords.add("p@ssw0rd");
@@ -159,7 +161,7 @@ public class TestCredShell {
     assertTrue(outContent.toString().contains("credential1 has been successfully " +
         "created."));
     
-    String[] args2 = {"delete", "credential1", "--provider", 
+    String[] args2 = {"delete", "credential1", "-provider",
         "jceks://file" + tmpDir + "/credstore.jceks"};
     rc = shell.run(args2);
     assertEquals(0, rc);
@@ -186,4 +188,21 @@ public class TestCredShell {
       System.out.println(message);
     }
   }
+
+  @Test
+  public void testEmptyArgList() throws Exception {
+    CredentialShell shell = new CredentialShell();
+    shell.setConf(new Configuration());
+    assertEquals(1, shell.init(new String[0]));
+  }
+
+  @Test
+  public void testCommandHelpExitsNormally() throws Exception {
+    for (String cmd : Arrays.asList("create", "list", "delete")) {
+      CredentialShell shell = new CredentialShell();
+      shell.setConf(new Configuration());
+      assertEquals("Expected help argument on " + cmd + " to return 0",
+              0, shell.init(new String[] {cmd, "-help"}));
+    }
+  }
 }

