Author: umamahesh
Date: Fri May 9 17:01:21 2014
New Revision: 1593581

URL: http://svn.apache.org/r1593581
Log:
HDFS-6346. Optimize OP_SET_XATTRS by persisting single Xattr entry per setXattr/removeXattr api call. Contributed by Yi Liu.
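The optimization changes what gets persisted per call: each setXattr/removeXattr request now writes a single edit-log entry carrying only the one XAttr that changed, instead of the file's entire resulting xattr list. A minimal client-side sketch of the calls that produce the new OP_SET_XATTR and OP_REMOVE_XATTR records (the path and default Configuration are hypothetical, not part of this change):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class XAttrEditLogExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path file = new Path("/file_concat_target");   // hypothetical path

    // Each call below is persisted as one OP_SET_XATTR edit-log entry
    // carrying only the xattr being set, not the inode's whole xattr list.
    fs.setXAttr(file, "user.a1", new byte[]{0x31, 0x32, 0x33});
    fs.setXAttr(file, "user.a2", new byte[]{0x37, 0x38, 0x39});

    // Persisted as one OP_REMOVE_XATTR entry naming only "user.a2".
    fs.removeXAttr(file, "user.a2");
  }
}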
Modified:
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-2006.txt
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOpCodes.java
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/xattr.proto
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored
    hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-2006.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-2006.txt?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-2006.txt (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-2006.txt Fri May 9 17:01:21 2014
@@ -29,6 +29,9 @@ HDFS-2006 (Unreleased)
 
   OPTIMIZATIONS
 
+    HDFS-6346. Optimize OP_SET_XATTRS by persisting single Xattr entry per setXattr/removeXattr api call
+    (Yi Liu via umamahesh)
+
   BUG FIXES
 
     HDFS-6331. ClientProtocol#setXattr should not be annotated idempotent.
Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java Fri May 9 17:01:21 2014
@@ -50,6 +50,7 @@ import org.apache.hadoop.fs.permission.P
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.AclException;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
@@ -2889,14 +2890,20 @@ public class FSDirectory implements Clos
   void removeXAttr(String src, XAttr xAttr) throws IOException {
     writeLock();
     try {
-      List<XAttr> newXAttrs = unprotectedRemoveXAttr(src, xAttr);
-      fsImage.getEditLog().logSetXAttrs(src, newXAttrs);
+      XAttr removedXAttr = unprotectedRemoveXAttr(src, xAttr);
+      if (removedXAttr != null) {
+        fsImage.getEditLog().logRemoveXAttr(src, removedXAttr);
+      } else {
+        NameNode.stateChangeLog.info("DIR* FSDirectory.removeXAttr: XAttr " +
+            XAttrHelper.getPrefixName(xAttr) +
+            " does not exist on the path " + src);
+      }
     } finally {
       writeUnlock();
     }
   }
 
-  private List<XAttr> unprotectedRemoveXAttr(String src,
+  XAttr unprotectedRemoveXAttr(String src,
       XAttr xAttr) throws IOException {
     assert hasWriteLock();
     INodesInPath iip = rootDir.getINodesInPath4Write(normalizePath(src), true);
@@ -2904,9 +2911,11 @@ public class FSDirectory implements Clos
     int snapshotId = iip.getLatestSnapshotId();
     List<XAttr> existingXAttrs = XAttrStorage.readINodeXAttrs(inode);
     List<XAttr> newXAttrs = filterINodeXAttr(existingXAttrs, xAttr);
-    XAttrStorage.updateINodeXAttrs(inode, newXAttrs, snapshotId);
-    
-    return newXAttrs;
+    if (existingXAttrs.size() != newXAttrs.size()) {
+      XAttrStorage.updateINodeXAttrs(inode, newXAttrs, snapshotId);
+      return xAttr;
+    }
+    return null;
   }
 
   List<XAttr> filterINodeXAttr(List<XAttr> existingXAttrs,
@@ -2930,14 +2939,14 @@ public class FSDirectory implements Clos
       throws IOException {
     writeLock();
     try {
-      List<XAttr> newXAttrs = unprotectedSetXAttr(src, xAttr, flag);
-      fsImage.getEditLog().logSetXAttrs(src, newXAttrs);
+      unprotectedSetXAttr(src, xAttr, flag);
+      fsImage.getEditLog().logSetXAttr(src, xAttr);
     } finally {
       writeUnlock();
     }
   }
 
-  List<XAttr> unprotectedSetXAttr(String src, XAttr xAttr,
+  void unprotectedSetXAttr(String src, XAttr xAttr,
       EnumSet<XAttrSetFlag> flag) throws IOException {
     assert hasWriteLock();
     INodesInPath iip = rootDir.getINodesInPath4Write(normalizePath(src), true);
@@ -2946,8 +2955,6 @@ public class FSDirectory implements Clos
     List<XAttr> existingXAttrs = XAttrStorage.readINodeXAttrs(inode);
     List<XAttr> newXAttrs = setINodeXAttr(existingXAttrs, xAttr, flag);
     XAttrStorage.updateINodeXAttrs(inode, newXAttrs, snapshotId);
-    
-    return newXAttrs;
   }
 
   List<XAttr> setINodeXAttr(List<XAttr> existingXAttrs, XAttr xAttr,
@@ -2977,16 +2984,6 @@ public class FSDirectory implements Clos
     return xAttrs;
   }
 
-  void unprotectedUpdateXAttrs(String src, List<XAttr> xAttrs)
-      throws IOException {
-    assert hasWriteLock();
-    INodesInPath iip = rootDir.getINodesInPath4Write(normalizePath(src), true);
-    INode inode = resolveLastINode(src, iip);
-    int snapshotId = iip.getLatestSnapshotId();
-    
-    XAttrStorage.updateINodeXAttrs(inode, xAttrs, snapshotId);
-  }
-
   List<XAttr> getXAttrs(String src) throws IOException {
     String srcs = normalizePath(src);
     readLock();

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java Fri May 9 17:01:21 2014
@@ -70,6 +70,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.ReassignLeaseOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RemoveCacheDirectiveInfoOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RemoveCachePoolOp;
+import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RemoveXAttrOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RenameOldOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RenameOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RenameSnapshotOp;
@@ -81,7 +82,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetPermissionsOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetQuotaOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetReplicationOp;
-import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetXAttrsOp;
+import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetXAttrOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SymlinkOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.TimesOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.UpdateBlocksOp;
@@ -1053,10 +1054,17 @@ public class FSEditLog implements LogsPu
     logEdit(op);
   }
 
-  void logSetXAttrs(String src, List<XAttr> xAttrs) {
-    final SetXAttrsOp op = SetXAttrsOp.getInstance();
+  void logSetXAttr(String src, XAttr xAttr) {
+    final SetXAttrOp op = SetXAttrOp.getInstance();
     op.src = src;
-    op.xAttrs = xAttrs;
+    op.xAttr = xAttr;
+    logEdit(op);
+  }
+
+  void logRemoveXAttr(String src, XAttr xAttr) {
+    final RemoveXAttrOp op = RemoveXAttrOp.getInstance();
+    op.src = src;
+    op.xAttr = xAttr;
     logEdit(op);
   }
 
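Both new op classes introduced below (SetXAttrOp and RemoveXAttrOp in FSEditLogOp.java) carry a single XAttr field instead of a List<XAttr>. For reference, a sketch of the kind of object one such op serializes, built with the same XAttr.Builder this patch uses when reading the op back from XML (the org.apache.hadoop.fs.XAttr package and the name/value shown are assumptions for illustration, not part of the patch):

import org.apache.hadoop.fs.XAttr;

class SingleXAttrSketch {
  // Minimal sketch (hypothetical name/value): the one XAttr a SetXAttrOp or
  // RemoveXAttrOp carries, corresponding to the raw attribute "user.a1".
  static XAttr userA1() {
    return new XAttr.Builder()
        .setNameSpace(XAttr.NameSpace.USER)   // the "user." prefix
        .setName("a1")
        .setValue(new byte[]{0x31, 0x32, 0x33})
        .build();
  }
}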
Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java Fri May 9 17:01:21 2014
@@ -25,12 +25,14 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.Arrays;
 import java.util.EnumMap;
+import java.util.EnumSet;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -76,7 +78,8 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetPermissionsOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetQuotaOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetReplicationOp;
-import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetXAttrsOp;
+import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SetXAttrOp;
+import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.RemoveXAttrOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.SymlinkOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.TimesOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.UpdateBlocksOp;
@@ -799,11 +802,16 @@ public class FSEditLogLoader {
       fsDir.unprotectedSetAcl(setAclOp.src, setAclOp.aclEntries);
       break;
     }
-    case OP_SET_XATTRS: {
-      SetXAttrsOp setXAttrsOp = (SetXAttrsOp) op;
-      fsDir.unprotectedUpdateXAttrs(setXAttrsOp.src, setXAttrsOp.xAttrs);
+    case OP_SET_XATTR: {
+      SetXAttrOp setXAttrOp = (SetXAttrOp) op;
+      fsDir.unprotectedSetXAttr(setXAttrOp.src, setXAttrOp.xAttr,
+          EnumSet.of(XAttrSetFlag.CREATE, XAttrSetFlag.REPLACE));
       break;
     }
+    case OP_REMOVE_XATTR: {
+      RemoveXAttrOp removeXAttrOp = (RemoveXAttrOp) op;
+      fsDir.unprotectedRemoveXAttr(removeXAttrOp.src, removeXAttrOp.xAttr);
+    }
     default:
       throw new IOException("Invalid operation read " + op.opCode);
     }

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java Fri May 9 17:01:21 2014
@@ -54,7 +54,8 @@ import static org.apache.hadoop.hdfs.ser
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_SET_PERMISSIONS;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_SET_QUOTA;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_SET_REPLICATION;
-import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_SET_XATTRS;
+import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_SET_XATTR;
+import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_REMOVE_XATTR;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_START_LOG_SEGMENT;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_SYMLINK;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_TIMES;
@@ -190,7 +191,8 @@ public abstract class FSEditLogOp {
         OP_ROLLING_UPGRADE_START, "start"));
     inst.put(OP_ROLLING_UPGRADE_FINALIZE, new RollingUpgradeOp(
         OP_ROLLING_UPGRADE_FINALIZE, "finalize"));
-    inst.put(OP_SET_XATTRS, new SetXAttrsOp());
+    inst.put(OP_SET_XATTR, new SetXAttrOp());
+    inst.put(OP_REMOVE_XATTR, new RemoveXAttrOp());
   }
 
   public FSEditLogOp get(FSEditLogOpCodes opcode) {
@@ -3496,23 +3498,23 @@ public abstract class FSEditLogOp {
     }
   }
 
-  static class SetXAttrsOp extends FSEditLogOp {
-    List<XAttr> xAttrs = Lists.newArrayList();
+  static class RemoveXAttrOp extends FSEditLogOp {
+    XAttr xAttr;
     String src;
 
-    private SetXAttrsOp() {
-      super(OP_SET_XATTRS);
+    private RemoveXAttrOp() {
+      super(OP_REMOVE_XATTR);
     }
 
-    static SetXAttrsOp getInstance() {
-      return new SetXAttrsOp();
+    static RemoveXAttrOp getInstance() {
+      return new RemoveXAttrOp();
     }
 
     @Override
     void readFields(DataInputStream in, int logVersion) throws IOException {
       XAttrEditLogProto p = XAttrEditLogProto.parseDelimitedFrom(in);
       src = p.getSrc();
-      xAttrs = PBHelper.convertXAttrs(p.getXAttrsList());
+      xAttr = PBHelper.convertXAttr(p.getXAttr());
     }
 
     @Override
@@ -3521,23 +3523,62 @@ public abstract class FSEditLogOp {
       if (src != null) {
         b.setSrc(src);
       }
-      b.addAllXAttrs(PBHelper.convertXAttrProto(xAttrs));
+      b.setXAttr(PBHelper.convertXAttrProto(xAttr));
       b.build().writeDelimitedTo(out);
     }
 
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "SRC", src);
-      appendXAttrsToXml(contentHandler, xAttrs);
+      appendXAttrToXml(contentHandler, xAttr);
     }
 
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
       src = st.getValue("SRC");
-      xAttrs = readXAttrsFromXml(st);
-      if (xAttrs == null) {
-        xAttrs = Lists.newArrayList();
+      xAttr = readXAttrFromXml(st);
+    }
+  }
+
+  static class SetXAttrOp extends FSEditLogOp {
+    XAttr xAttr;
+    String src;
+
+    private SetXAttrOp() {
+      super(OP_SET_XATTR);
+    }
+
+    static SetXAttrOp getInstance() {
+      return new SetXAttrOp();
+    }
+
+    @Override
+    void readFields(DataInputStream in, int logVersion) throws IOException {
+      XAttrEditLogProto p = XAttrEditLogProto.parseDelimitedFrom(in);
+      src = p.getSrc();
+      xAttr = PBHelper.convertXAttr(p.getXAttr());
+    }
+
+    @Override
+    public void writeFields(DataOutputStream out) throws IOException {
+      XAttrEditLogProto.Builder b = XAttrEditLogProto.newBuilder();
+      if (src != null) {
+        b.setSrc(src);
       }
+      b.setXAttr(PBHelper.convertXAttrProto(xAttr));
+      b.build().writeDelimitedTo(out);
+    }
+
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+      appendXAttrToXml(contentHandler, xAttr);
+    }
+
+    @Override
+    void fromXml(Stanza st) throws InvalidXmlException {
+      src = st.getValue("SRC");
+      xAttr = readXAttrFromXml(st);
     }
   }
 
@@ -4157,42 +4198,41 @@ public abstract class FSEditLogOp {
     return aclEntries;
   }
 
-  private static void appendXAttrsToXml(ContentHandler contentHandler,
-      List<XAttr> xAttrs) throws SAXException {
-    for (XAttr a : xAttrs) {
-      contentHandler.startElement("", "", "XATTR", new AttributesImpl());
-      XMLUtils.addSaxString(contentHandler, "NAMESPACE",
-          a.getNameSpace().toString());
-      XMLUtils.addSaxString(contentHandler, "NAME", a.getName());
+  private static void appendXAttrToXml(ContentHandler contentHandler,
+      XAttr xAttr) throws SAXException {
+    contentHandler.startElement("", "", "XATTR", new AttributesImpl());
+    XMLUtils.addSaxString(contentHandler, "NAMESPACE",
+        xAttr.getNameSpace().toString());
+    XMLUtils.addSaxString(contentHandler, "NAME", xAttr.getName());
+    if (xAttr.getValue() != null) {
       try {
         XMLUtils.addSaxString(contentHandler, "VALUE",
-            XAttrCodec.encodeValue(a.getValue(), XAttrCodec.HEX));
+            XAttrCodec.encodeValue(xAttr.getValue(), XAttrCodec.HEX));
       } catch (IOException e) {
         throw new SAXException(e);
       }
-      contentHandler.endElement("", "", "XATTR");
     }
+    contentHandler.endElement("", "", "XATTR");
   }
 
-  private static List<XAttr> readXAttrsFromXml(Stanza st)
+  private static XAttr readXAttrFromXml(Stanza st)
       throws InvalidXmlException {
-    List<XAttr> xAttrs = Lists.newArrayList();
     if (!st.hasChildren("XATTR")) {
      return null;
    }
 
-    try {
-      List<Stanza> stanzas = st.getChildren("XATTR");
-      for (Stanza s : stanzas) {
-        XAttr a = new XAttr.Builder()
-          .setNameSpace(XAttr.NameSpace.valueOf(s.getValue("NAMESPACE")))
-          .setName(s.getValue("NAME"))
-          .setValue(XAttrCodec.decodeValue(s.getValue("VALUE"))).build();
-        xAttrs.add(a);
+    Stanza a = st.getChildren("XATTR").get(0);
+    XAttr.Builder builder = new XAttr.Builder();
+    builder.setNameSpace(XAttr.NameSpace.valueOf(a.getValue("NAMESPACE"))).
+        setName(a.getValue("NAME"));
+    String v = a.getValueOrNull("VALUE");
+    if (v != null) {
+      try {
+        builder.setValue(XAttrCodec.decodeValue(v));
+      } catch (IOException e) {
+        throw new InvalidXmlException(e.toString());
       }
-      return xAttrs;
-    } catch (IOException e) {
-      throw new InvalidXmlException(e.toString());
     }
+    return builder.build();
   }
 }

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOpCodes.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOpCodes.java?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOpCodes.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOpCodes.java Fri May 9 17:01:21 2014
@@ -70,7 +70,8 @@ public enum FSEditLogOpCodes {
   OP_SET_ACL ((byte) 40),
   OP_ROLLING_UPGRADE_START ((byte) 41),
   OP_ROLLING_UPGRADE_FINALIZE ((byte) 42),
-  OP_SET_XATTRS ((byte) 43),
+  OP_SET_XATTR ((byte) 43),
+  OP_REMOVE_XATTR ((byte) 44),
 
   // Note that the current range of the valid OP code is 0~127
   OP_INVALID ((byte) -1);

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/xattr.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/xattr.proto?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/xattr.proto (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/xattr.proto Fri May 9 17:01:21 2014
@@ -36,7 +36,7 @@ message XAttrProto {
 
 message XAttrEditLogProto {
   required string src = 1;
-  repeated XAttrProto xAttrs = 2;
+  optional XAttrProto xAttr = 2;
 }
 
 enum XAttrSetFlagProto {
Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java Fri May 9 17:01:21 2014
@@ -1134,11 +1134,13 @@ public class DFSTestUtil {
         .setType(AclEntryType.OTHER)
         .build());
     filesystem.setAcl(pathConcatTarget, aclEntryList);
-    // OP_SET_XATTRS
+    // OP_SET_XATTR
     filesystem.setXAttr(pathConcatTarget, "user.a1",
         new byte[]{0x31, 0x32, 0x33});
     filesystem.setXAttr(pathConcatTarget, "user.a2",
         new byte[]{0x37, 0x38, 0x39});
+    // OP_REMOVE_XATTR
+    filesystem.removeXAttr(pathConcatTarget, "user.a2");
   }
 
   public static void abortStream(DFSOutputStream out) throws IOException {

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml?rev=1593581&r1=1593580&r2=1593581&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml Fri May 9 17:01:21 2014
@@ -938,16 +938,32 @@
     </DATA>
   </RECORD>
   <RECORD>
-    <OPCODE>OP_SET_XATTRS</OPCODE>
+    <OPCODE>OP_SET_XATTR</OPCODE>
     <DATA>
       <TXID>75</TXID>
       <SRC>/file_concat_target</SRC>
+      <XATTR>
+        <NAMESPACE>USER</NAMESPACE>
+        <NAME>a1</NAME>
+        <VALUE>0x313233</VALUE>
+      </XATTR>
     </DATA>
   </RECORD>
   <RECORD>
-    <OPCODE>OP_END_LOG_SEGMENT</OPCODE>
+    <OPCODE>OP_REMOVE_XATTR</OPCODE>
     <DATA>
       <TXID>76</TXID>
+      <SRC>/file_concat_target</SRC>
+      <XATTR>
+        <NAMESPACE>USER</NAMESPACE>
+        <NAME>a1</NAME>
+      </XATTR>
+    </DATA>
+  </RECORD>
+  <RECORD>
+    <OPCODE>OP_END_LOG_SEGMENT</OPCODE>
+    <DATA>
+      <TXID>77</TXID>
     </DATA>
   </RECORD>
 </EDITS>
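The DFSTestUtil change above makes the stored edits exercise both new opcodes: user.a1 and user.a2 are set, then user.a2 is removed. A minimal sketch (hypothetical path and cluster configuration, not part of this change) of checking the namespace state that replaying such an edit log should reproduce:

import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class XAttrReplayCheck {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path target = new Path("/file_concat_target");   // hypothetical path

    // After set user.a1, set user.a2, remove user.a2, only user.a1 remains.
    Map<String, byte[]> xAttrs = fs.getXAttrs(target);
    System.out.println("has user.a1: " + xAttrs.containsKey("user.a1"));
    System.out.println("has user.a2: " + xAttrs.containsKey("user.a2"));
  }
}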