HDFS-6987. Move CipherSuite xattr information up to the encryption zone root. 
Contributed by Zhe Zhang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1737950d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1737950d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1737950d

Branch: refs/heads/HDFS-6581
Commit: 1737950d0fc83c68f386881b843c41b0b1e342de
Parents: c50fc92
Author: Andrew Wang <w...@apache.org>
Authored: Sun Sep 21 21:28:14 2014 -0700
Committer: Andrew Wang <w...@apache.org>
Committed: Sun Sep 21 21:29:46 2014 -0700

----------------------------------------------------------------------
 .../apache/hadoop/fs/FileEncryptionInfo.java    |  12 +-
 .../hadoop/crypto/key/kms/server/MiniKMS.java   |   1 -
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |   3 +
 .../java/org/apache/hadoop/hdfs/DFSClient.java  |   3 +-
 .../hadoop/hdfs/protocol/EncryptionZone.java    |  35 +++--
 .../apache/hadoop/hdfs/protocolPB/PBHelper.java |  48 +++++-
 .../server/namenode/EncryptionZoneManager.java  |  51 ++++---
 .../hdfs/server/namenode/FSDirectory.java       | 152 ++++++++++++++-----
 .../hdfs/server/namenode/FSEditLogLoader.java   |   8 +-
 .../hdfs/server/namenode/FSNamesystem.java      |  28 ++--
 .../hdfs/server/namenode/INodesInPath.java      |   3 +-
 .../hadoop-hdfs/src/main/proto/encryption.proto |   7 +-
 .../hadoop-hdfs/src/main/proto/hdfs.proto       |  22 ++-
 13 files changed, 271 insertions(+), 102 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
index f960233..641709d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
@@ -34,6 +34,7 @@ public class FileEncryptionInfo {
   private final CipherSuite cipherSuite;
   private final byte[] edek;
   private final byte[] iv;
+  private final String keyName;
   private final String ezKeyVersionName;
 
   /**
@@ -42,14 +43,16 @@ public class FileEncryptionInfo {
    * @param suite CipherSuite used to encrypt the file
    * @param edek encrypted data encryption key (EDEK) of the file
    * @param iv initialization vector (IV) used to encrypt the file
+   * @param keyName name of the key used for the encryption zone
    * @param ezKeyVersionName name of the KeyVersion used to encrypt the
    *                         encrypted data encryption key.
    */
   public FileEncryptionInfo(final CipherSuite suite, final byte[] edek,
-      final byte[] iv, final String ezKeyVersionName) {
+      final byte[] iv, final String keyName, final String ezKeyVersionName) {
     checkNotNull(suite);
     checkNotNull(edek);
     checkNotNull(iv);
+    checkNotNull(keyName);
     checkNotNull(ezKeyVersionName);
     checkArgument(edek.length == suite.getAlgorithmBlockSize(),
         "Unexpected key length");
@@ -58,6 +61,7 @@ public class FileEncryptionInfo {
     this.cipherSuite = suite;
     this.edek = edek;
     this.iv = iv;
+    this.keyName = keyName;
     this.ezKeyVersionName = ezKeyVersionName;
   }
 
@@ -84,6 +88,11 @@ public class FileEncryptionInfo {
   }
 
   /**
+   * @return name of the encryption zone key.
+   */
+  public String getKeyName() { return keyName; }
+
+  /**
    * @return name of the encryption zone KeyVersion used to encrypt the
    * encrypted data encryption key (EDEK).
    */
@@ -95,6 +104,7 @@ public class FileEncryptionInfo {
     builder.append("cipherSuite: " + cipherSuite);
     builder.append(", edek: " + Hex.encodeHexString(edek));
     builder.append(", iv: " + Hex.encodeHexString(iv));
+    builder.append(", keyName: " + keyName);
     builder.append(", ezKeyVersionName: " + ezKeyVersionName);
     builder.append("}");
     return builder.toString();
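
For illustration only (not part of the patch): a minimal sketch of calling the
updated FileEncryptionInfo constructor, which now carries the encryption zone
key name alongside the per-file EDEK, IV, and key version name. The key name,
version name, and byte arrays below are made-up placeholders sized for
AES/CTR/NoPadding's 16-byte block size, which the constructor checks.

    import org.apache.hadoop.crypto.CipherSuite;
    import org.apache.hadoop.fs.FileEncryptionInfo;

    public class FeInfoSketch {
      public static void main(String[] args) {
        // Placeholder EDEK/IV; lengths must equal suite.getAlgorithmBlockSize().
        byte[] edek = new byte[16];
        byte[] iv = new byte[16];

        FileEncryptionInfo feInfo = new FileEncryptionInfo(
            CipherSuite.AES_CTR_NOPADDING, edek, iv,
            "myEZKey", "myEZKey@0");

        // toString() now reports the zone key name as well.
        System.out.println(feInfo.getKeyName() + " -> " + feInfo);
      }
    }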

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
index 16e78ce..697d7ec 100644
--- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
+++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
@@ -169,7 +169,6 @@ public class MiniKMS {
       kms.set(KMSConfiguration.KEY_PROVIDER_URI,
           "jceks://file@" + new Path(kmsConfDir, "kms.keystore").toUri());
       kms.set("hadoop.kms.authentication.type", "simple");
-      kms.setBoolean(KMSConfiguration.KEY_AUTHORIZATION_ENABLE, false);
       Writer writer = new FileWriter(kmsFile);
       kms.writeXml(writer);
       writer.close();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f29f7c5..d95e228 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -133,6 +133,9 @@ Trunk (Unreleased)
     HDFS-6609. Use DirectorySnapshottableFeature to represent a snapshottable
     directory. (Jing Zhao via wheat9)
 
+    HDFS-6987. Move CipherSuite xattr information up to the encryption zone
+    root. (Zhe Zhang via wang)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index ed08be0..6f94370 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -1319,8 +1319,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
           " an encrypted file");
     }
     EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption(
-        //TODO: here we have to put the keyName to be provided by HDFS-6987
-        null, feInfo.getEzKeyVersionName(), feInfo.getIV(),
+        feInfo.getKeyName(), feInfo.getEzKeyVersionName(), feInfo.getIV(),
         feInfo.getEncryptedDataEncryptionKey());
     try {
       return provider.decryptEncryptedKey(ekv);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/EncryptionZone.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/EncryptionZone.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/EncryptionZone.java
index a20513a..58e9eba 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/EncryptionZone.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/EncryptionZone.java
@@ -21,6 +21,7 @@ import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.crypto.CipherSuite;
 
 /**
  * A simple class for representing an encryption zone. Presently an encryption
@@ -31,32 +32,40 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceStability.Evolving
 public class EncryptionZone {
 
+  private final long id;
   private final String path;
+  private final CipherSuite suite;
   private final String keyName;
-  private final long id;
 
-  public EncryptionZone(String path, String keyName, long id) {
+  public EncryptionZone(long id, String path,
+      CipherSuite suite, String keyName) {
+    this.id = id;
     this.path = path;
+    this.suite = suite;
     this.keyName = keyName;
-    this.id = id;
+  }
+
+  public long getId() {
+    return id;
   }
 
   public String getPath() {
     return path;
   }
 
-  public String getKeyName() {
-    return keyName;
+  public CipherSuite getSuite() {
+    return suite;
   }
 
-  public long getId() {
-    return id;
+  public String getKeyName() {
+    return keyName;
   }
 
   @Override
   public int hashCode() {
     return new HashCodeBuilder(13, 31).
-      append(path).append(keyName).append(id).
+      append(id).append(path).
+      append(suite).append(keyName).
       toHashCode();
   }
 
@@ -74,16 +83,18 @@ public class EncryptionZone {
 
     EncryptionZone rhs = (EncryptionZone) obj;
     return new EqualsBuilder().
+      append(id, rhs.id).
       append(path, rhs.path).
+      append(suite, rhs.suite).
       append(keyName, rhs.keyName).
-      append(id, rhs.id).
       isEquals();
   }
 
   @Override
   public String toString() {
-    return "EncryptionZone [path=" + path +
-        ", keyName=" + keyName +
-        ", id=" + id + "]";
+    return "EncryptionZone [id=" + id +
+        ", path=" + path +
+        ", suite=" + suite +
+        ", keyName=" + keyName + "]";
   }
 }
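
For illustration only: a small sketch of the reordered EncryptionZone
constructor, which now takes the inode id first and records the zone's
CipherSuite. The id, path, and key name are placeholders.

    import org.apache.hadoop.crypto.CipherSuite;
    import org.apache.hadoop.hdfs.protocol.EncryptionZone;

    public class EzSketch {
      public static void main(String[] args) {
        // Placeholder id, path, and key name.
        EncryptionZone zone = new EncryptionZone(
            16388L, "/secure", CipherSuite.AES_CTR_NOPADDING, "myEZKey");
        System.out.println(zone);  // prints id, path, suite, and keyName
      }
    }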

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 862a803..c0b71eb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -177,7 +177,6 @@ import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifie
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
-import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
@@ -2332,12 +2331,14 @@ public class PBHelper {
     return EncryptionZoneProto.newBuilder()
         .setId(zone.getId())
         .setKeyName(zone.getKeyName())
-        .setPath(zone.getPath()).build();
+        .setPath(zone.getPath())
+        .setSuite(convert(zone.getSuite()))
+        .build();
   }
 
   public static EncryptionZone convert(EncryptionZoneProto proto) {
-    return new EncryptionZone(proto.getPath(), proto.getKeyName(),
-        proto.getId());
+    return new EncryptionZone(proto.getId(), proto.getPath(),
+        convert(proto.getSuite()), proto.getKeyName());
   }
 
   public static ShortCircuitShmSlotProto convert(SlotId slotId) {
@@ -2662,6 +2663,30 @@ public class PBHelper {
         .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
         .setIv(getByteString(info.getIV()))
         .setEzKeyVersionName(info.getEzKeyVersionName())
+        .setKeyName(info.getKeyName())
+        .build();
+  }
+
+  public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
+      FileEncryptionInfo info) {
+    if (info == null) {
+      return null;
+    }
+    return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
+        .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
+        .setIv(getByteString(info.getIV()))
+        .setEzKeyVersionName(info.getEzKeyVersionName())
+        .build();
+  }
+
+  public static HdfsProtos.ZoneEncryptionInfoProto convert(
+      CipherSuite suite, String keyName) {
+    if (suite == null || keyName == null) {
+      return null;
+    }
+    return HdfsProtos.ZoneEncryptionInfoProto.newBuilder()
+        .setSuite(convert(suite))
+        .setKeyName(keyName)
         .build();
   }
 
@@ -2674,7 +2699,20 @@ public class PBHelper {
     byte[] key = proto.getKey().toByteArray();
     byte[] iv = proto.getIv().toByteArray();
     String ezKeyVersionName = proto.getEzKeyVersionName();
-    return new FileEncryptionInfo(suite, key, iv, ezKeyVersionName);
+    String keyName = proto.getKeyName();
+    return new FileEncryptionInfo(suite, key, iv, keyName, ezKeyVersionName);
+  }
+
+  public static FileEncryptionInfo convert(
+      HdfsProtos.PerFileEncryptionInfoProto fileProto,
+      CipherSuite suite, String keyName) {
+    if (fileProto == null || suite == null || keyName == null) {
+      return null;
+    }
+    byte[] key = fileProto.getKey().toByteArray();
+    byte[] iv = fileProto.getIv().toByteArray();
+    String ezKeyVersionName = fileProto.getEzKeyVersionName();
+    return new FileEncryptionInfo(suite, key, iv, keyName, ezKeyVersionName);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
index c428690..e22f8b9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
@@ -26,6 +26,7 @@ import java.util.TreeMap;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttrSetFlag;
@@ -33,6 +34,8 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException;
+import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
+import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -53,8 +56,8 @@ public class EncryptionZoneManager {
   public static Logger LOG = LoggerFactory.getLogger(EncryptionZoneManager
       .class);
 
-  private static final EncryptionZone NULL_EZ =
-      new EncryptionZone("", "", -1);
+  public static final EncryptionZone NULL_EZ =
+      new EncryptionZone(-1, "", CipherSuite.UNKNOWN, "");
 
   /**
    * EncryptionZoneInt is the internal representation of an encryption zone. The
@@ -62,21 +65,27 @@ public class EncryptionZoneManager {
    * contains the EZ's pathname.
    */
   private static class EncryptionZoneInt {
-    private final String keyName;
     private final long inodeId;
+    private final CipherSuite suite;
+    private final String keyName;
 
-    EncryptionZoneInt(long inodeId, String keyName) {
-      this.keyName = keyName;
+    EncryptionZoneInt(long inodeId, CipherSuite suite, String keyName) {
       this.inodeId = inodeId;
-    }
-
-    String getKeyName() {
-      return keyName;
+      this.suite = suite;
+      this.keyName = keyName;
     }
 
     long getINodeId() {
       return inodeId;
     }
+
+    CipherSuite getSuite() {
+      return suite;
+    }
+
+    String getKeyName() {
+      return keyName;
+    }
   }
 
   private final TreeMap<Long, EncryptionZoneInt> encryptionZones;
@@ -109,9 +118,9 @@ public class EncryptionZoneManager {
    * @param inodeId of the encryption zone
    * @param keyName encryption zone key name
    */
-  void addEncryptionZone(Long inodeId, String keyName) {
+  void addEncryptionZone(Long inodeId, CipherSuite suite, String keyName) {
     assert dir.hasWriteLock();
-    unprotectedAddEncryptionZone(inodeId, keyName);
+    unprotectedAddEncryptionZone(inodeId, suite, keyName);
   }
 
   /**
@@ -122,8 +131,10 @@ public class EncryptionZoneManager {
    * @param inodeId of the encryption zone
    * @param keyName encryption zone key name
    */
-  void unprotectedAddEncryptionZone(Long inodeId, String keyName) {
-    final EncryptionZoneInt ez = new EncryptionZoneInt(inodeId, keyName);
+  void unprotectedAddEncryptionZone(Long inodeId,
+      CipherSuite suite, String keyName) {
+    final EncryptionZoneInt ez = new EncryptionZoneInt(
+        inodeId, suite, keyName);
     encryptionZones.put(inodeId, ez);
   }
 
@@ -207,8 +218,8 @@ public class EncryptionZoneManager {
     if (ezi == null) {
       return NULL_EZ;
     } else {
-      return new EncryptionZone(getFullPathName(ezi), ezi.getKeyName(),
-          ezi.getINodeId());
+      return new EncryptionZone(ezi.getINodeId(), getFullPathName(ezi),
+          ezi.getSuite(), ezi.getKeyName());
     }
   }
 
@@ -264,7 +275,7 @@ public class EncryptionZoneManager {
    * <p/>
    * Called while holding the FSDirectory lock.
    */
-  XAttr createEncryptionZone(String src, String keyName)
+  XAttr createEncryptionZone(String src, CipherSuite suite, String keyName)
       throws IOException {
     assert dir.hasWriteLock();
     if (dir.isNonEmptyDirectory(src)) {
@@ -284,8 +295,10 @@ public class EncryptionZoneManager {
           "encryption zone. (" + getFullPathName(ezi) + ")");
     }
 
+    final HdfsProtos.ZoneEncryptionInfoProto proto =
+        PBHelper.convert(suite, keyName);
     final XAttr ezXAttr = XAttrHelper
-        .buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, keyName.getBytes());
+        .buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, proto.toByteArray());
 
     final List<XAttr> xattrs = Lists.newArrayListWithCapacity(1);
     xattrs.add(ezXAttr);
@@ -327,8 +340,8 @@ public class EncryptionZoneManager {
         continue;
       }
       // Add the EZ to the result list
-      zones.add(new EncryptionZone(pathName,
-          ezi.getKeyName(), ezi.getINodeId()));
+      zones.add(new EncryptionZone(ezi.getINodeId(), pathName,
+          ezi.getSuite(), ezi.getKeyName()));
       count++;
       if (count >= numResponses) {
         break;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index 9346ea5..961808e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -37,6 +37,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileEncryptionInfo;
@@ -1402,9 +1403,10 @@ public class FSDirectory implements Closeable {
       if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) {
         return getSnapshotsListing(srcs, startAfter);
       }
-      final INodesInPath inodesInPath = getLastINodeInPath(srcs);
+      final INodesInPath inodesInPath = getINodesInPath(srcs, true);
+      final INode[] inodes = inodesInPath.getINodes();
       final int snapshot = inodesInPath.getPathSnapshotId();
-      final INode targetNode = inodesInPath.getLastINode();
+      final INode targetNode = inodes[inodes.length - 1];
       if (targetNode == null)
         return null;
       byte parentStoragePolicy = isSuperUser ?
@@ -1414,7 +1416,7 @@ public class FSDirectory implements Closeable {
         return new DirectoryListing(
             new HdfsFileStatus[]{createFileStatus(HdfsFileStatus.EMPTY_NAME,
                 targetNode, needLocation, parentStoragePolicy, snapshot,
-                isRawPath)}, 0);
+                isRawPath, inodesInPath)}, 0);
       }
 
       final INodeDirectory dirInode = targetNode.asDirectory();
@@ -1431,7 +1433,7 @@ public class FSDirectory implements Closeable {
             cur.getLocalStoragePolicyID(): BlockStoragePolicy.ID_UNSPECIFIED;
        listing[i] = createFileStatus(cur.getLocalNameBytes(), cur, needLocation,
             getStoragePolicyID(curPolicy, parentStoragePolicy), snapshot,
-            isRawPath);
+            isRawPath, inodesInPath);
         listingCnt++;
         if (needLocation) {
             // Once we  hit lsLimit locations, stop.
@@ -1482,7 +1484,8 @@ public class FSDirectory implements Closeable {
     for (int i = 0; i < numOfListing; i++) {
       Root sRoot = snapshots.get(i + skipSize).getRoot();
       listing[i] = createFileStatus(sRoot.getLocalNameBytes(), sRoot,
-          BlockStoragePolicy.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID, false);
+          BlockStoragePolicy.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
+          false, null);
     }
     return new DirectoryListing(
         listing, snapshots.size() - skipSize - numOfListing);
@@ -1505,12 +1508,14 @@ public class FSDirectory implements Closeable {
       if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) {
         return getFileInfo4DotSnapshot(srcs);
       }
-      final INodesInPath inodesInPath = getLastINodeInPath(srcs, resolveLink);
-      final INode i = inodesInPath.getINode(0);
+      final INodesInPath inodesInPath = getINodesInPath(srcs, resolveLink);
+      final INode[] inodes = inodesInPath.getINodes();
+      final INode i = inodes[inodes.length - 1];
       byte policyId = includeStoragePolicy && i != null && !i.isSymlink() ?
           i.getStoragePolicyID() : BlockStoragePolicy.ID_UNSPECIFIED;
       return i == null ? null : createFileStatus(HdfsFileStatus.EMPTY_NAME, i,
-          policyId, inodesInPath.getPathSnapshotId(), isRawPath);
+          policyId, inodesInPath.getPathSnapshotId(), isRawPath,
+          inodesInPath);
     } finally {
       readUnlock();
     }
@@ -2162,8 +2167,17 @@ public class FSDirectory implements Closeable {
           for (XAttr xattr : xattrs) {
             final String xaName = XAttrHelper.getPrefixName(xattr);
             if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
-              ezManager.unprotectedAddEncryptionZone(inode.getId(),
-                  new String(xattr.getValue()));
+              try {
+                final HdfsProtos.ZoneEncryptionInfoProto ezProto =
+                    HdfsProtos.ZoneEncryptionInfoProto.parseFrom(
+                        xattr.getValue());
+                ezManager.unprotectedAddEncryptionZone(inode.getId(),
+                    PBHelper.convert(ezProto.getSuite()),
+                    ezProto.getKeyName());
+              } catch (InvalidProtocolBufferException e) {
+                NameNode.LOG.warn("Error parsing protocol buffer of " +
+                    "EZ XAttr " + xattr.getName());
+              }
             }
           }
         }
@@ -2355,12 +2369,15 @@ public class FSDirectory implements Closeable {
    * @throws IOException if any error occurs
    */
   private HdfsFileStatus createFileStatus(byte[] path, INode node,
-      boolean needLocation, byte storagePolicy, int snapshot, boolean isRawPath)
+      boolean needLocation, byte storagePolicy, int snapshot,
+      boolean isRawPath, INodesInPath iip)
       throws IOException {
     if (needLocation) {
-      return createLocatedFileStatus(path, node, storagePolicy, snapshot, isRawPath);
+      return createLocatedFileStatus(path, node, storagePolicy, snapshot,
+          isRawPath, iip);
     } else {
-      return createFileStatus(path, node, storagePolicy, snapshot, isRawPath);
+      return createFileStatus(path, node, storagePolicy, snapshot,
+          isRawPath, iip);
     }
   }
 
@@ -2368,14 +2385,14 @@ public class FSDirectory implements Closeable {
    * Create FileStatus by file INode 
    */
   HdfsFileStatus createFileStatus(byte[] path, INode node, byte storagePolicy,
-      int snapshot, boolean isRawPath) throws IOException {
+      int snapshot, boolean isRawPath, INodesInPath iip) throws IOException {
      long size = 0;     // length is zero for directories
      short replication = 0;
      long blocksize = 0;
      final boolean isEncrypted;
 
      final FileEncryptionInfo feInfo = isRawPath ? null :
-         getFileEncryptionInfo(node, snapshot);
+         getFileEncryptionInfo(node, snapshot, iip);
 
      if (node.isFile()) {
        final INodeFile fileNode = node.asFile();
@@ -2413,7 +2430,8 @@ public class FSDirectory implements Closeable {
    * Create FileStatus with location info by file INode
    */
   private HdfsLocatedFileStatus createLocatedFileStatus(byte[] path, INode node,
-      byte storagePolicy, int snapshot, boolean isRawPath) throws IOException {
+      byte storagePolicy, int snapshot, boolean isRawPath,
+      INodesInPath iip) throws IOException {
     assert hasReadLock();
     long size = 0; // length is zero for directories
     short replication = 0;
@@ -2421,7 +2439,7 @@ public class FSDirectory implements Closeable {
     LocatedBlocks loc = null;
     final boolean isEncrypted;
     final FileEncryptionInfo feInfo = isRawPath ? null :
-        getFileEncryptionInfo(node, snapshot);
+        getFileEncryptionInfo(node, snapshot, iip);
     if (node.isFile()) {
       final INodeFile fileNode = node.asFile();
       size = fileNode.computeFileSize(snapshot);
@@ -2746,11 +2764,11 @@ public class FSDirectory implements Closeable {
     }
   }
 
-  XAttr createEncryptionZone(String src, String keyName)
+  XAttr createEncryptionZone(String src, CipherSuite suite, String keyName)
     throws IOException {
     writeLock();
     try {
-      return ezManager.createEncryptionZone(src, keyName);
+      return ezManager.createEncryptionZone(src, suite, keyName);
     } finally {
       writeUnlock();
     }
@@ -2781,7 +2799,8 @@ public class FSDirectory implements Closeable {
   void setFileEncryptionInfo(String src, FileEncryptionInfo info)
       throws IOException {
     // Make the PB for the xattr
-    final HdfsProtos.FileEncryptionInfoProto proto = PBHelper.convert(info);
+    final HdfsProtos.PerFileEncryptionInfoProto proto =
+        PBHelper.convertPerFileEncInfo(info);
     final byte[] protoBytes = proto.toByteArray();
     final XAttr fileEncryptionAttr =
         XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
@@ -2797,35 +2816,64 @@ public class FSDirectory implements Closeable {
   }
 
   /**
-   * Return the FileEncryptionInfo for an INode, or null if the INode is not
-   * an encrypted file.
-   */
-  FileEncryptionInfo getFileEncryptionInfo(INode inode, int snapshotId)
-      throws IOException {
+   * This function combines the per-file encryption info (obtained
+   * from the inode's XAttrs), and the encryption info from its zone, and
+   * returns a consolidated FileEncryptionInfo instance. Null is returned
+   * for non-encrypted files.
+   *
+   * @param inode inode of the file
+   * @param snapshotId ID of the snapshot that
+   *                   we want to get encryption info from
+   * @param iip inodes in the path containing the file, passed in to
+   *            avoid obtaining the list of inodes again; if iip is
+   *            null then the list of inodes will be obtained again
+   * @return consolidated file encryption info; null for non-encrypted files
+   */
+  FileEncryptionInfo getFileEncryptionInfo(INode inode, int snapshotId,
+      INodesInPath iip) throws IOException {
     if (!inode.isFile()) {
       return null;
     }
     readLock();
     try {
-      List<XAttr> xAttrs = XAttrStorage.readINodeXAttrs(inode, snapshotId);
-      if (xAttrs == null) {
-        return null;
+      if (iip == null) {
+        iip = getINodesInPath(inode.getFullPathName(), true);
       }
-      for (XAttr x : xAttrs) {
-        if (XAttrHelper.getPrefixName(x)
-            .equals(CRYPTO_XATTR_FILE_ENCRYPTION_INFO)) {
-          try {
-            HdfsProtos.FileEncryptionInfoProto proto =
-                HdfsProtos.FileEncryptionInfoProto.parseFrom(x.getValue());
-            FileEncryptionInfo feInfo = PBHelper.convert(proto);
-            return feInfo;
-          } catch (InvalidProtocolBufferException e) {
-            throw new IOException("Could not parse file encryption info for " +
-                "inode " + inode, e);
-          }
+      EncryptionZone encryptionZone = getEZForPath(iip);
+      if (encryptionZone == null ||
+          encryptionZone.equals(EncryptionZoneManager.NULL_EZ)) {
+        // not an encrypted file
+        return null;
+      } else if(encryptionZone.getPath() == null
+          || encryptionZone.getPath().isEmpty()) {
+        if (NameNode.LOG.isDebugEnabled()) {
+          NameNode.LOG.debug("Encryption zone " +
+              encryptionZone.getPath() + " does not have a valid path.");
         }
       }
-      return null;
+
+      CipherSuite suite = encryptionZone.getSuite();
+      String keyName = encryptionZone.getKeyName();
+
+      XAttr fileXAttr = unprotectedGetXAttrByName(inode, snapshotId,
+          CRYPTO_XATTR_FILE_ENCRYPTION_INFO);
+
+      if (fileXAttr == null) {
+        NameNode.LOG.warn("Could not find encryption XAttr for file " +
+            inode.getFullPathName() + " in encryption zone " +
+            encryptionZone.getPath());
+        return null;
+      }
+
+      try {
+        HdfsProtos.PerFileEncryptionInfoProto fileProto =
+            HdfsProtos.PerFileEncryptionInfoProto.parseFrom(
+                fileXAttr.getValue());
+        return PBHelper.convert(fileProto, suite, keyName);
+      } catch (InvalidProtocolBufferException e) {
+        throw new IOException("Could not parse file encryption info for " +
+            "inode " + inode, e);
+      }
     } finally {
       readUnlock();
     }
@@ -2860,7 +2908,11 @@ public class FSDirectory implements Closeable {
        * of encryption zones.
        */
       if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
-        ezManager.addEncryptionZone(inode.getId(), new String(xattr.getValue()));
+        final HdfsProtos.ZoneEncryptionInfoProto ezProto =
+            HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());
+        ezManager.addEncryptionZone(inode.getId(),
+            PBHelper.convert(ezProto.getSuite()),
+            ezProto.getKeyName());
       }
 
       if (!isFile && SECURITY_XATTR_UNREADABLE_BY_SUPERUSER.equals(xaName)) {
@@ -2977,6 +3029,22 @@ public class FSDirectory implements Closeable {
     return XAttrStorage.readINodeXAttrs(inode, snapshotId);
   }
 
+  private XAttr unprotectedGetXAttrByName(INode inode, int snapshotId,
+      String xAttrName)
+      throws IOException {
+    List<XAttr> xAttrs = XAttrStorage.readINodeXAttrs(inode, snapshotId);
+    if (xAttrs == null) {
+      return null;
+    }
+    for (XAttr x : xAttrs) {
+      if (XAttrHelper.getPrefixName(x)
+          .equals(xAttrName)) {
+        return x;
+      }
+    }
+    return null;
+  }
+
   private static INode resolveLastINode(String src, INodesInPath iip)
       throws FileNotFoundException {
     INode inode = iip.getLastINode();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
index 46edf54..cc0572e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
@@ -341,8 +341,10 @@ public class FSEditLogLoader {
       // 3. OP_ADD to open file for append
 
       // See if the file already exists (persistBlocks call)
-      final INodesInPath iip = fsDir.getLastINodeInPath(path);
-      INodeFile oldFile = INodeFile.valueOf(iip.getINode(0), path, true);
+      final INodesInPath iip = fsDir.getINodesInPath(path, true);
+      final INode[] inodes = iip.getINodes();
+      INodeFile oldFile = INodeFile.valueOf(
+          inodes[inodes.length - 1], path, true);
       if (oldFile != null && addCloseOp.overwrite) {
         // This is OP_ADD with overwrite
         fsDir.unprotectedDelete(path, addCloseOp.mtime);
@@ -372,7 +374,7 @@ public class FSEditLogLoader {
           HdfsFileStatus stat = fsNamesys.dir.createFileStatus(
               HdfsFileStatus.EMPTY_NAME, newFile,
               BlockStoragePolicy.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
-              false);
+              false, iip);
           fsNamesys.addCacheEntryWithPayload(addCloseOp.rpcClientId,
               addCloseOp.rpcCallId, stat);
         }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 4dc2786..876cf49 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import static org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
 import static org.apache.hadoop.crypto.key.KeyProviderCryptoExtension
     .EncryptedKeyVersion;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
@@ -134,6 +133,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
 import org.apache.hadoop.fs.CacheFlag;
@@ -165,7 +165,6 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.hdfs.StorageType;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.UnknownCipherSuiteException;
 import org.apache.hadoop.hdfs.protocol.AclException;
@@ -1833,8 +1832,10 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
           doAccessTime = false;
         }
 
-        final INodesInPath iip = dir.getLastINodeInPath(src);
-        final INodeFile inode = INodeFile.valueOf(iip.getLastINode(), src);
+        final INodesInPath iip = dir.getINodesInPath(src, true);
+        final INode[] inodes = iip.getINodes();
+        final INodeFile inode = INodeFile.valueOf(
+            inodes[inodes.length - 1], src);
         if (isPermissionEnabled) {
           checkUnreadableBySuperuser(pc, inode, iip.getPathSnapshotId());
         }
@@ -1867,7 +1868,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
 
         final FileEncryptionInfo feInfo =
           FSDirectory.isReservedRawName(srcArg) ?
-          null : dir.getFileEncryptionInfo(inode, iip.getPathSnapshotId());
+          null : dir.getFileEncryptionInfo(inode, iip.getPathSnapshotId(),
+              iip);
 
         final LocatedBlocks blocks =
           blockManager.createLocatedBlocks(inode.getBlocks(), fileSize,
@@ -2599,7 +2601,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       feInfo = new FileEncryptionInfo(suite,
           edek.getEncryptedKeyVersion().getMaterial(),
           edek.getEncryptedKeyIv(),
-          edek.getEncryptionKeyVersionName());
+          ezKeyName, edek.getEncryptionKeyVersionName());
       Preconditions.checkNotNull(feInfo);
     }
 
@@ -8619,8 +8621,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
         throw new IOException("Must specify a key name when creating an " +
             "encryption zone");
       }
-      KeyVersion keyVersion = provider.getCurrentKey(keyName);
-      if (keyVersion == null) {
+      KeyProvider.Metadata metadata = provider.getMetadata(keyName);
+      if (metadata == null) {
         /*
          * It would be nice if we threw something more specific than
          * IOException when the key is not found, but the KeyProvider API
@@ -8631,7 +8633,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
          */
         throw new IOException("Key " + keyName + " doesn't exist.");
       }
-      createEncryptionZoneInt(src, keyName, cacheEntry != null);
+      createEncryptionZoneInt(src, metadata.getCipher(),
+          keyName, cacheEntry != null);
       success = true;
     } catch (AccessControlException e) {
       logAuditEvent(false, "createEncryptionZone", src);
@@ -8641,8 +8644,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     }
   }
 
-  private void createEncryptionZoneInt(final String srcArg, String keyName,
-      final boolean logRetryCache) throws IOException {
+  private void createEncryptionZoneInt(final String srcArg, String cipher,
+      String keyName, final boolean logRetryCache) throws IOException {
     String src = srcArg;
     HdfsFileStatus resultingStat = null;
     checkSuperuserPrivilege();
@@ -8656,7 +8659,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       checkNameNodeSafeMode("Cannot create encryption zone on " + src);
       src = resolvePath(src, pathComponents);
 
-      final XAttr ezXAttr = dir.createEncryptionZone(src, keyName);
+      final CipherSuite suite = CipherSuite.convert(cipher);
+      final XAttr ezXAttr = dir.createEncryptionZone(src, suite, keyName);
       List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
       xAttrs.add(ezXAttr);
       getEditLog().logSetXAttrs(src, xAttrs, logRetryCache);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java
index 34a3268..ba052a4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java
@@ -133,6 +133,7 @@ public class INodesInPath {
    *        be thrown when the path refers to a symbolic link.
    * @return the specified number of existing INodes in the path
    */
+  // TODO: Eliminate null elements from inodes (to be provided by HDFS-7104)
   static INodesInPath resolve(final INodeDirectory startingDir,
       final byte[][] components, final int numOfINodes, 
       final boolean resolveLink) throws UnresolvedLinkException {
@@ -311,7 +312,7 @@ public class INodesInPath {
   }
 
   /**
-   * @return the whole inodes array including the null elements.
+   * @return the inodes array excluding the null elements.
    */
   INode[] getINodes() {
     if (capacity < inodes.length) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/encryption.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/encryption.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/encryption.proto
index 1a33cdc..c4b7009 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/encryption.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/encryption.proto
@@ -46,9 +46,10 @@ message ListEncryptionZonesRequestProto {
 }
 
 message EncryptionZoneProto {
-  required string path = 1;
-  required string keyName = 2;
-  required int64 id = 3;
+  required int64 id = 1;
+  required string path = 2;
+  required CipherSuite suite = 3;
+  required string keyName = 4;
 }
 
 message ListEncryptionZonesResponseProto {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1737950d/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
index f12055c..d1ba68f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
@@ -216,7 +216,27 @@ message FileEncryptionInfoProto {
   required CipherSuite suite = 1;
   required bytes key = 2;
   required bytes iv = 3;
-  required string ezKeyVersionName = 4;
+  required string keyName = 4;
+  required string ezKeyVersionName = 5;
+}
+
+/**
+ * Encryption information for an individual
+ * file within an encryption zone
+ */
+message PerFileEncryptionInfoProto {
+  required bytes key = 1;
+  required bytes iv = 2;
+  required string ezKeyVersionName = 3;
+}
+
+/**
+ * Encryption information for an encryption
+ * zone
+ */
+message ZoneEncryptionInfoProto {
+  required CipherSuite suite = 1;
+  required string keyName = 2;
 }
 
 /**
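
With this split, the encryption zone root xattr value is a serialized
ZoneEncryptionInfoProto (suite plus key name) instead of the raw key-name
bytes, and each file stores only a PerFileEncryptionInfoProto. A rough
round-trip sketch using the helpers added in this patch; the key name is a
placeholder.

    import org.apache.hadoop.crypto.CipherSuite;
    import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
    import org.apache.hadoop.hdfs.protocolPB.PBHelper;

    public class ZoneXAttrSketch {
      public static void main(String[] args) throws Exception {
        // Serialize, as EncryptionZoneManager.createEncryptionZone now does.
        HdfsProtos.ZoneEncryptionInfoProto zoneProto =
            PBHelper.convert(CipherSuite.AES_CTR_NOPADDING, "myEZKey");
        byte[] xattrValue = zoneProto.toByteArray();

        // Parse back, as FSDirectory does when the zone xattr is re-read.
        HdfsProtos.ZoneEncryptionInfoProto parsed =
            HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattrValue);
        System.out.println(parsed.getKeyName() + " / " + parsed.getSuite());
      }
    }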
