http://git-wip-us.apache.org/repos/asf/hadoop/blob/69f91d8c/hadoop-common-project/hadoop-common/dev-support/jdiff/Apache_Hadoop_Common_2.7.2.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/dev-support/jdiff/Apache_Hadoop_Common_2.7.2.xml b/hadoop-common-project/hadoop-common/dev-support/jdiff/Apache_Hadoop_Common_2.7.2.xml
index 5ef99b2..47e64d8 100644
--- a/hadoop-common-project/hadoop-common/dev-support/jdiff/Apache_Hadoop_Common_2.7.2.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/jdiff/Apache_Hadoop_Common_2.7.2.xml
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="iso-8859-1" standalone="no"?>
 <!-- Generated by the JDiff Javadoc doclet -->
 <!-- (http://www.jdiff.org) -->
-<!-- on Thu Aug 18 16:00:16 PDT 2016 -->
+<!-- on Wed Aug 24 13:50:51 PDT 2016 -->
 
 <api
   xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'
@@ -9,7 +9,7 @@
   name="Apache Hadoop Common 2.7.2"
   jdversion="1.0.9">
 
-<!--  Command line arguments =  -doclet 
org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet 
-docletpath 
/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-common/target/hadoop-annotations.jar:/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-common/target/jdiff.jar
 -verbose -classpath 
/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-common/target/classes:/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-annotations/target/hadoop-annotations-2.7.2.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_92.jdk/Contents/Home/lib/tools.jar:/Users/wtan/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/wtan/.m2/repository/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/Users/wtan/.m2/repository/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/Users/wtan/.m2/repository/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/Users/wtan/.m2/repository/commons-
 
httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/Users/wtan/.m2/repository/commons-codec/commons-codec/1.4/commons-codec-1.4.jar:/Users/wtan/.m2/repository/commons-io/commons-io/2.4/commons-io-2.4.jar:/Users/wtan/.m2/repository/commons-net/commons-net/3.1/commons-net-3.1.jar:/Users/wtan/.m2/repository/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/Users/wtan/.m2/repository/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/Users/wtan/.m2/repository/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/Users/wtan/.m2/repository/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/Users/wtan/.m2/repository/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/Users/wtan/.m2/repository/com/sun/jersey/jersey-json/1.9/jersey-json-1.9.jar:/Users/wtan/.m2/repository/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/Users/wtan/.m2/repository/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/Users/wtan/.m2/repository/javax/xml/bind/jaxb-ap
 
i/2.2.2/jaxb-api-2.2.2.jar:/Users/wtan/.m2/repository/javax/xml/stream/stax-api/1.0-2/stax-api-1.0-2.jar:/Users/wtan/.m2/repository/javax/activation/activation/1.1/activation-1.1.jar:/Users/wtan/.m2/repository/org/codehaus/jackson/jackson-jaxrs/1.9.13/jackson-jaxrs-1.9.13.jar:/Users/wtan/.m2/repository/org/codehaus/jackson/jackson-xc/1.9.13/jackson-xc-1.9.13.jar:/Users/wtan/.m2/repository/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/Users/wtan/.m2/repository/asm/asm/3.2/asm-3.2.jar:/Users/wtan/.m2/repository/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/Users/wtan/.m2/repository/log4j/log4j/1.2.17/log4j-1.2.17.jar:/Users/wtan/.m2/repository/net/java/dev/jets3t/jets3t/0.9.0/jets3t-0.9.0.jar:/Users/wtan/.m2/repository/org/apache/httpcomponents/httpclient/4.2.5/httpclient-4.2.5.jar:/Users/wtan/.m2/repository/org/apache/httpcomponents/httpcore/4.2.5/httpcore-4.2.5.jar:/Users/wtan/.m2/repository/com/jamesmurty/utils/java-xmlbuilder/0.4/java-xmlbuilder-0.4.ja
 
r:/Users/wtan/.m2/repository/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/Users/wtan/.m2/repository/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/Users/wtan/.m2/repository/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/Users/wtan/.m2/repository/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar:/Users/wtan/.m2/repository/commons-beanutils/commons-beanutils-core/1.8.0/commons-beanutils-core-1.8.0.jar:/Users/wtan/.m2/repository/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/Users/wtan/.m2/repository/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/Users/wtan/.m2/repository/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/Users/wtan/.m2/repository/org/apache/avro/avro/1.7.4/avro-1.7.4.jar:/Users/wtan/.m2/repository/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3.jar:/Users/wtan/.m2/repository/org/xerial/snappy/snappy-java/1.0.4.1/snappy-java-1.0.4.1
 
.jar:/Users/wtan/.m2/repository/org/apache/ant/ant/1.8.1/ant-1.8.1.jar:/Users/wtan/.m2/repository/org/apache/ant/ant-launcher/1.8.1/ant-launcher-1.8.1.jar:/Users/wtan/.m2/repository/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/Users/wtan/.m2/repository/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.7.2.jar:/Users/wtan/.m2/repository/org/apache/directory/server/apacheds-kerberos-codec/2.0.0-M15/apacheds-kerberos-codec-2.0.0-M15.jar:/Users/wtan/.m2/repository/org/apache/directory/server/apacheds-i18n/2.0.0-M15/apacheds-i18n-2.0.0-M15.jar:/Users/wtan/.m2/repository/org/apache/directory/api/api-asn1-api/1.0.0-M20/api-asn1-api-1.0.0-M20.jar:/Users/wtan/.m2/repository/org/apache/directory/api/api-util/1.0.0-M20/api-util-1.0.0-M20.jar:/Users/wtan/.m2/repository/org/apache/curator/curator-framework/2.7.1/curator-framework-2.7.1.jar:/Users/wtan/.m2/repository/com/jcraft/j
 
sch/0.1.42/jsch-0.1.42.jar:/Users/wtan/.m2/repository/org/apache/curator/curator-client/2.7.1/curator-client-2.7.1.jar:/Users/wtan/.m2/repository/org/apache/curator/curator-recipes/2.7.1/curator-recipes-2.7.1.jar:/Users/wtan/.m2/repository/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/Users/wtan/.m2/repository/org/apache/htrace/htrace-core/3.1.0-incubating/htrace-core-3.1.0-incubating.jar:/Users/wtan/.m2/repository/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/Users/wtan/.m2/repository/io/netty/netty/3.6.2.Final/netty-3.6.2.Final.jar:/Users/wtan/.m2/repository/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar:/Users/wtan/.m2/repository/org/tukaani/xz/1.0/xz-1.0.jar
 -sourcepath 
/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-common/src/main/java
 -doclet 
org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet 
-docletpath 
/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-c
 
ommon/target/hadoop-annotations.jar:/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-common/target/jdiff.jar
 -apidir 
/Users/wtan/project/github/hadoop-common-trunk/hadoop-common-project/hadoop-common/target/site/jdiff/xml
 -apiname Apache Hadoop Common 2.7.2 -->
+<!--  Command line arguments =  -doclet 
org.apache.hadoop.classification.tools.IncludePublicAnnotationsJDiffDoclet 
-docletpath 
/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-common/target/hadoop-annotations.jar:/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-common/target/jdiff.jar
 -verbose -classpath 
/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-common/target/classes:/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-annotations/target/hadoop-annotations-2.7.2.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_40.jdk/Contents/Home/lib/tools.jar:/Users/vinodkv/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/vinodkv/.m2/repository/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/Users/vinodkv/.m2/repository/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/Users/vinodkv/.m2/repository/xml
 
enc/xmlenc/0.52/xmlenc-0.52.jar:/Users/vinodkv/.m2/repository/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/Users/vinodkv/.m2/repository/commons-codec/commons-codec/1.4/commons-codec-1.4.jar:/Users/vinodkv/.m2/repository/commons-io/commons-io/2.4/commons-io-2.4.jar:/Users/vinodkv/.m2/repository/commons-net/commons-net/3.1/commons-net-3.1.jar:/Users/vinodkv/.m2/repository/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/Users/vinodkv/.m2/repository/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/Users/vinodkv/.m2/repository/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/Users/vinodkv/.m2/repository/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/Users/vinodkv/.m2/repository/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/Users/vinodkv/.m2/repository/com/sun/jersey/jersey-json/1.9/jersey-json-1.9.jar:/Users/vinodkv/.m2/repository/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/Users/vinodkv/.m2/repository/com
 
/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/Users/vinodkv/.m2/repository/javax/xml/bind/jaxb-api/2.2.2/jaxb-api-2.2.2.jar:/Users/vinodkv/.m2/repository/javax/xml/stream/stax-api/1.0-2/stax-api-1.0-2.jar:/Users/vinodkv/.m2/repository/javax/activation/activation/1.1/activation-1.1.jar:/Users/vinodkv/.m2/repository/org/codehaus/jackson/jackson-jaxrs/1.9.13/jackson-jaxrs-1.9.13.jar:/Users/vinodkv/.m2/repository/org/codehaus/jackson/jackson-xc/1.9.13/jackson-xc-1.9.13.jar:/Users/vinodkv/.m2/repository/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/Users/vinodkv/.m2/repository/asm/asm/3.2/asm-3.2.jar:/Users/vinodkv/.m2/repository/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/Users/vinodkv/.m2/repository/log4j/log4j/1.2.17/log4j-1.2.17.jar:/Users/vinodkv/.m2/repository/net/java/dev/jets3t/jets3t/0.9.0/jets3t-0.9.0.jar:/Users/vinodkv/.m2/repository/org/apache/httpcomponents/httpclient/4.2.5/httpclient-4.2.5.jar:/Users/vinodkv/.m2/repository/org/apache/
 
httpcomponents/httpcore/4.2.5/httpcore-4.2.5.jar:/Users/vinodkv/.m2/repository/com/jamesmurty/utils/java-xmlbuilder/0.4/java-xmlbuilder-0.4.jar:/Users/vinodkv/.m2/repository/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/Users/vinodkv/.m2/repository/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/Users/vinodkv/.m2/repository/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/Users/vinodkv/.m2/repository/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar:/Users/vinodkv/.m2/repository/commons-beanutils/commons-beanutils-core/1.8.0/commons-beanutils-core-1.8.0.jar:/Users/vinodkv/.m2/repository/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/Users/vinodkv/.m2/repository/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/Users/vinodkv/.m2/repository/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/Users/vinodkv/.m2/repository/org/apache/avro/avro/1.7.4/avro-1.7.4.jar:/
 
Users/vinodkv/.m2/repository/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3.jar:/Users/vinodkv/.m2/repository/org/xerial/snappy/snappy-java/1.0.4.1/snappy-java-1.0.4.1.jar:/Users/vinodkv/.m2/repository/org/apache/ant/ant/1.8.1/ant-1.8.1.jar:/Users/vinodkv/.m2/repository/org/apache/ant/ant-launcher/1.8.1/ant-launcher-1.8.1.jar:/Users/vinodkv/.m2/repository/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/Users/vinodkv/.m2/repository/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.7.2.jar:/Users/vinodkv/.m2/repository/org/apache/directory/server/apacheds-kerberos-codec/2.0.0-M15/apacheds-kerberos-codec-2.0.0-M15.jar:/Users/vinodkv/.m2/repository/org/apache/directory/server/apacheds-i18n/2.0.0-M15/apacheds-i18n-2.0.0-M15.jar:/Users/vinodkv/.m2/repository/org/apache/directory/api/api-asn1-api/1.0.0-M20/api-asn1-api-1.0.0-M20.jar:/Users/vinodkv/.m2/re
 
pository/org/apache/directory/api/api-util/1.0.0-M20/api-util-1.0.0-M20.jar:/Users/vinodkv/.m2/repository/org/apache/curator/curator-framework/2.7.1/curator-framework-2.7.1.jar:/Users/vinodkv/.m2/repository/com/jcraft/jsch/0.1.42/jsch-0.1.42.jar:/Users/vinodkv/.m2/repository/org/apache/curator/curator-client/2.7.1/curator-client-2.7.1.jar:/Users/vinodkv/.m2/repository/org/apache/curator/curator-recipes/2.7.1/curator-recipes-2.7.1.jar:/Users/vinodkv/.m2/repository/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/Users/vinodkv/.m2/repository/org/apache/htrace/htrace-core/3.1.0-incubating/htrace-core-3.1.0-incubating.jar:/Users/vinodkv/.m2/repository/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/Users/vinodkv/.m2/repository/io/netty/netty/3.6.2.Final/netty-3.6.2.Final.jar:/Users/vinodkv/.m2/repository/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar:/Users/vinodkv/.m2/repository/org/tukaani/xz/1.0/xz-1.0.jar
 -sourcepath /Users/vinodkv/Workspace/ec
 
lipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-common/src/main/java
 -doclet 
org.apache.hadoop.classification.tools.IncludePublicAnnotationsJDiffDoclet 
-docletpath 
/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-common/target/hadoop-annotations.jar:/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-common/target/jdiff.jar
 -apidir 
/Users/vinodkv/Workspace/eclipse-workspace/apache-git/hadoop/hadoop-common-project/hadoop-common/target/site/jdiff/xml
 -apiname Apache Hadoop Common 2.7.2 -->
 <package name="org.apache.hadoop">
   <!-- start class org.apache.hadoop.HadoopIllegalArgumentException -->
   <class name="HadoopIllegalArgumentException" 
extends="java.lang.IllegalArgumentException"
@@ -1547,91 +1547,6 @@
     </doc>
   </class>
   <!-- end class org.apache.hadoop.conf.Configuration -->
-  <!-- start class org.apache.hadoop.conf.Configuration.DeprecationDelta -->
-  <class name="Configuration.DeprecationDelta" extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="DeprecationDelta" type="java.lang.String, 
java.lang.String, java.lang.String"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <constructor name="DeprecationDelta" type="java.lang.String, 
java.lang.String"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="getKey" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getNewKeys" return="java.lang.String[]"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getCustomMessage" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <doc>
-    <![CDATA[A pending addition to the global set of deprecated keys.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.conf.Configuration.DeprecationDelta -->
-  <!-- start class org.apache.hadoop.conf.Configuration.IntegerRanges -->
-  <class name="Configuration.IntegerRanges" extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <implements name="java.lang.Iterable"/>
-    <constructor name="IntegerRanges"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <constructor name="IntegerRanges" type="java.lang.String"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="isIncluded" return="boolean"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="value" type="int"/>
-      <doc>
-      <![CDATA[Is the given value in the set of ranges
- @param value the value to check
- @return is the value in the ranges?]]>
-      </doc>
-    </method>
-    <method name="isEmpty" return="boolean"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[@return true if there are no values in this range, else 
false.]]>
-      </doc>
-    </method>
-    <method name="toString" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="iterator" return="java.util.Iterator"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <doc>
-    <![CDATA[A class that represents a set of positive integer ranges. It 
parses
- strings of the form: "2-3,5,7-" where ranges are separated by comma and
- the lower/upper bounds are separated by dash. Either the lower or upper
- bound may be omitted meaning all values up to or over. So the string
- above means 2, 3, 5, and 7, 8, 9, ...]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.conf.Configuration.IntegerRanges -->
   <!-- start class org.apache.hadoop.conf.Configured -->
   <class name="Configured" extends="java.lang.Object"
     abstract="false"
@@ -1668,285 +1583,6 @@
     </doc>
   </class>
   <!-- end class org.apache.hadoop.conf.Configured -->
-  <!-- start class org.apache.hadoop.conf.ConfServlet.BadFormatException -->
-  <class name="ConfServlet.BadFormatException" extends="java.lang.Exception"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="BadFormatException" type="java.lang.String"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-  </class>
-  <!-- end class org.apache.hadoop.conf.ConfServlet.BadFormatException -->
-  <!-- start interface org.apache.hadoop.conf.Reconfigurable -->
-  <interface name="Reconfigurable"    abstract="true"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <implements name="org.apache.hadoop.conf.Configurable"/>
-    <method name="reconfigureProperty" return="java.lang.String"
-      abstract="true" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="property" type="java.lang.String"/>
-      <param name="newVal" type="java.lang.String"/>
-      <exception name="ReconfigurationException" 
type="org.apache.hadoop.conf.ReconfigurationException"/>
-      <doc>
-      <![CDATA[Change a configuration property on this object to the value 
specified.
-
- Change a configuration property on this object to the value specified
- and return the previous value that the configuration property was set to
- (or null if it was not previously set). If newVal is null, set the property
- to its default value;
-
- If the property cannot be changed, throw a
- {@link ReconfigurationException}.]]>
-      </doc>
-    </method>
-    <method name="isPropertyReconfigurable" return="boolean"
-      abstract="true" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="property" type="java.lang.String"/>
-      <doc>
-      <![CDATA[Return whether a given property is changeable at run time.
-
- If isPropertyReconfigurable returns true for a property,
- then changeConf should not throw an exception when changing
- this property.]]>
-      </doc>
-    </method>
-    <method name="getReconfigurableProperties" return="java.util.Collection"
-      abstract="true" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Return all the properties that can be changed at run time.]]>
-      </doc>
-    </method>
-    <doc>
-    <![CDATA[Something whose {@link Configuration} can be changed at run 
time.]]>
-    </doc>
-  </interface>
-  <!-- end interface org.apache.hadoop.conf.Reconfigurable -->
-  <!-- start class org.apache.hadoop.conf.ReconfigurableBase -->
-  <class name="ReconfigurableBase" extends="org.apache.hadoop.conf.Configured"
-    abstract="true"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <implements name="org.apache.hadoop.conf.Reconfigurable"/>
-    <constructor name="ReconfigurableBase"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Construct a ReconfigurableBase.]]>
-      </doc>
-    </constructor>
-    <constructor name="ReconfigurableBase" 
type="org.apache.hadoop.conf.Configuration"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Construct a ReconfigurableBase with the {@link Configuration}
- conf.]]>
-      </doc>
-    </constructor>
-    <method name="setReconfigurationUtil"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="ru" type="org.apache.hadoop.conf.ReconfigurationUtil"/>
-    </method>
-    <method name="getChangedProperties" return="java.util.Collection"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="newConf" type="org.apache.hadoop.conf.Configuration"/>
-      <param name="oldConf" type="org.apache.hadoop.conf.Configuration"/>
-    </method>
-    <method name="startReconfigurationTask"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <exception name="IOException" type="java.io.IOException"/>
-      <doc>
-      <![CDATA[Start a reconfiguration task to reload configuration in 
background.]]>
-      </doc>
-    </method>
-    <method name="getReconfigurationTaskStatus" 
return="org.apache.hadoop.conf.ReconfigurationTaskStatus"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="shutdownReconfigurationTask"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="reconfigureProperty" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="true" visibility="public"
-      deprecated="not deprecated">
-      <param name="property" type="java.lang.String"/>
-      <param name="newVal" type="java.lang.String"/>
-      <exception name="ReconfigurationException" 
type="org.apache.hadoop.conf.ReconfigurationException"/>
-      <doc>
-      <![CDATA[{@inheritDoc}
-
- This method makes the change to this objects {@link Configuration}
- and calls reconfigurePropertyImpl to update internal data structures.
- This method cannot be overridden, subclasses should instead override
- reconfigureProperty.]]>
-      </doc>
-    </method>
-    <method name="getReconfigurableProperties" return="java.util.Collection"
-      abstract="true" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[{@inheritDoc}
-
- Subclasses must override this.]]>
-      </doc>
-    </method>
-    <method name="isPropertyReconfigurable" return="boolean"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="property" type="java.lang.String"/>
-      <doc>
-      <![CDATA[{@inheritDoc}
-
- Subclasses may wish to override this with a more efficient implementation.]]>
-      </doc>
-    </method>
-    <method name="reconfigurePropertyImpl"
-      abstract="true" native="false" synchronized="false"
-      static="false" final="false" visibility="protected"
-      deprecated="not deprecated">
-      <param name="property" type="java.lang.String"/>
-      <param name="newVal" type="java.lang.String"/>
-      <exception name="ReconfigurationException" 
type="org.apache.hadoop.conf.ReconfigurationException"/>
-      <doc>
-      <![CDATA[Change a configuration property.
-
- Subclasses must override this. This method applies the change to
- all internal data structures derived from the configuration property
- that is being changed. If this object owns other Reconfigurable objects
- reconfigureProperty should be called recursively to make sure that
- to make sure that the configuration of these objects is updated.]]>
-      </doc>
-    </method>
-    <doc>
-    <![CDATA[Utility base class for implementing the Reconfigurable interface.
-
- Subclasses should override reconfigurePropertyImpl to change individual
- properties and getReconfigurableProperties to get all properties that
- can be changed at run time.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.conf.ReconfigurableBase -->
-  <!-- start class org.apache.hadoop.conf.ReconfigurationException -->
-  <class name="ReconfigurationException" extends="java.lang.Exception"
-    abstract="false"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="ReconfigurationException"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Create a new instance of {@link ReconfigurationException}.]]>
-      </doc>
-    </constructor>
-    <constructor name="ReconfigurationException" type="java.lang.String, 
java.lang.String, java.lang.String, java.lang.Throwable"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Create a new instance of {@link ReconfigurationException}.]]>
-      </doc>
-    </constructor>
-    <constructor name="ReconfigurationException" type="java.lang.String, 
java.lang.String, java.lang.String"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Create a new instance of {@link ReconfigurationException}.]]>
-      </doc>
-    </constructor>
-    <method name="getProperty" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Get property that cannot be changed.]]>
-      </doc>
-    </method>
-    <method name="getNewValue" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Get value to which property was supposed to be changed.]]>
-      </doc>
-    </method>
-    <method name="getOldValue" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Get old value of property that cannot be changed.]]>
-      </doc>
-    </method>
-    <doc>
-    <![CDATA[Exception indicating that configuration property cannot be changed
- at run time.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.conf.ReconfigurationException -->
-  <!-- start class org.apache.hadoop.conf.ReconfigurationServlet -->
-  <class name="ReconfigurationServlet" extends="javax.servlet.http.HttpServlet"
-    abstract="false"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="ReconfigurationServlet"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="init"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <exception name="ServletException" 
type="javax.servlet.ServletException"/>
-    </method>
-    <method name="doGet"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="protected"
-      deprecated="not deprecated">
-      <param name="req" type="javax.servlet.http.HttpServletRequest"/>
-      <param name="resp" type="javax.servlet.http.HttpServletResponse"/>
-      <exception name="ServletException" 
type="javax.servlet.ServletException"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <method name="doPost"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="protected"
-      deprecated="not deprecated">
-      <param name="req" type="javax.servlet.http.HttpServletRequest"/>
-      <param name="resp" type="javax.servlet.http.HttpServletResponse"/>
-      <exception name="ServletException" 
type="javax.servlet.ServletException"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <field name="CONF_SERVLET_RECONFIGURABLE_PREFIX" type="java.lang.String"
-      transient="false" volatile="false"
-      static="true" final="true" visibility="public"
-      deprecated="not deprecated">
-    </field>
-    <doc>
-    <![CDATA[A servlet for changing a node's configuration.
-
- Reloads the configuration file, verifies whether changes are
- possible and asks the admin to approve the change.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.conf.ReconfigurationServlet -->
   <!-- start class org.apache.hadoop.conf.ReconfigurationTaskStatus -->
   <class name="ReconfigurationTaskStatus" extends="java.lang.Object"
     abstract="false"
@@ -1992,221 +1628,13 @@
     </method>
   </class>
   <!-- end class org.apache.hadoop.conf.ReconfigurationTaskStatus -->
-  <!-- start class org.apache.hadoop.conf.ReconfigurationUtil -->
-  <class name="ReconfigurationUtil" extends="java.lang.Object"
-    abstract="false"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="ReconfigurationUtil"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="getChangedProperties" return="java.util.Collection"
-      abstract="false" native="false" synchronized="false"
-      static="true" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="newConf" type="org.apache.hadoop.conf.Configuration"/>
-      <param name="oldConf" type="org.apache.hadoop.conf.Configuration"/>
-    </method>
-    <method name="parseChangedProperties" return="java.util.Collection"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="newConf" type="org.apache.hadoop.conf.Configuration"/>
-      <param name="oldConf" type="org.apache.hadoop.conf.Configuration"/>
-    </method>
-  </class>
-  <!-- end class org.apache.hadoop.conf.ReconfigurationUtil -->
-  <!-- start class org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange 
-->
-  <class name="ReconfigurationUtil.PropertyChange" extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="PropertyChange" type="java.lang.String, 
java.lang.String, java.lang.String"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <field name="prop" type="java.lang.String"
-      transient="false" volatile="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </field>
-    <field name="oldVal" type="java.lang.String"
-      transient="false" volatile="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </field>
-    <field name="newVal" type="java.lang.String"
-      transient="false" volatile="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </field>
-  </class>
-  <!-- end class org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange -->
   <doc>
   <![CDATA[Configuration of system parameters.]]>
   </doc>
 </package>
 <package name="org.apache.hadoop.crypto">
-  <!-- start class org.apache.hadoop.crypto.UnsupportedCodecException -->
-  <class name="UnsupportedCodecException" extends="java.lang.RuntimeException"
-    abstract="false"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="UnsupportedCodecException"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Default constructor]]>
-      </doc>
-    </constructor>
-    <constructor name="UnsupportedCodecException" type="java.lang.String"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Constructs an UnsupportedCodecException with the specified
- detail message.
-
- @param message the detail message]]>
-      </doc>
-    </constructor>
-    <constructor name="UnsupportedCodecException" type="java.lang.String, 
java.lang.Throwable"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Constructs a new exception with the specified detail message and
- cause.
-
- @param message the detail message
- @param cause the cause]]>
-      </doc>
-    </constructor>
-    <constructor name="UnsupportedCodecException" type="java.lang.Throwable"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <doc>
-      <![CDATA[Constructs a new exception with the specified cause.
-
- @param cause the cause]]>
-      </doc>
-    </constructor>
-    <doc>
-    <![CDATA[Thrown to indicate that the specific codec is not supported.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.crypto.UnsupportedCodecException -->
 </package>
 <package name="org.apache.hadoop.crypto.key">
-  <!-- start class org.apache.hadoop.crypto.key.CachingKeyProvider -->
-  <class name="CachingKeyProvider" 
extends="org.apache.hadoop.crypto.key.KeyProviderExtension"
-    abstract="false"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="CachingKeyProvider" 
type="org.apache.hadoop.crypto.key.KeyProvider, long, long"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="getCurrentKey" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <method name="getKeyVersion" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="versionName" type="java.lang.String"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <method name="deleteKey"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <method name="rollNewVersion" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
-      <param name="material" type="byte[]"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <method name="rollNewVersion" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
-      <exception name="NoSuchAlgorithmException" 
type="java.security.NoSuchAlgorithmException"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <method name="getMetadata" 
return="org.apache.hadoop.crypto.key.KeyProvider.Metadata"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <doc>
-    <![CDATA[A <code>KeyProviderExtension</code> implementation providing a 
short lived
- cache for <code>KeyVersions</code> and <code>Metadata</code>to avoid burst
- of requests to hit the underlying <code>KeyProvider</code>.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.crypto.key.CachingKeyProvider -->
-  <!-- start class org.apache.hadoop.crypto.key.JavaKeyStoreProvider.Factory 
-->
-  <class name="JavaKeyStoreProvider.Factory" 
extends="org.apache.hadoop.crypto.key.KeyProviderFactory"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="Factory"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="createProvider" 
return="org.apache.hadoop.crypto.key.KeyProvider"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="providerName" type="java.net.URI"/>
-      <param name="conf" type="org.apache.hadoop.conf.Configuration"/>
-      <exception name="IOException" type="java.io.IOException"/>
-    </method>
-    <doc>
-    <![CDATA[The factory to create JksProviders, which is used by the 
ServiceLoader.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.crypto.key.JavaKeyStoreProvider.Factory -->
-  <!-- start class 
org.apache.hadoop.crypto.key.JavaKeyStoreProvider.KeyMetadata -->
-  <class name="JavaKeyStoreProvider.KeyMetadata" extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <implements name="java.security.Key"/>
-    <implements name="java.io.Serializable"/>
-    <method name="getAlgorithm" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getFormat" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getEncoded" return="byte[]"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <doc>
-    <![CDATA[An adapter between a KeyStore Key and our Metadata. This is used 
to store
- the metadata in a KeyStore even though isn't really a key.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.crypto.key.JavaKeyStoreProvider.KeyMetadata 
-->
   <!-- start class org.apache.hadoop.crypto.key.KeyProvider -->
   <class name="KeyProvider" extends="java.lang.Object"
     abstract="true"
@@ -2523,1648 +1951,1558 @@
     </doc>
   </class>
   <!-- end class org.apache.hadoop.crypto.key.KeyProvider -->
-  <!-- start class org.apache.hadoop.crypto.key.KeyProvider.KeyVersion -->
-  <class name="KeyProvider.KeyVersion" extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
+  <!-- start class org.apache.hadoop.crypto.key.KeyProviderFactory -->
+  <class name="KeyProviderFactory" extends="java.lang.Object"
+    abstract="true"
+    static="false" final="false" visibility="public"
     deprecated="not deprecated">
-    <constructor name="KeyVersion" type="java.lang.String, java.lang.String, 
byte[]"
-      static="false" final="false" visibility="protected"
+    <constructor name="KeyProviderFactory"
+      static="false" final="false" visibility="public"
       deprecated="not deprecated">
     </constructor>
-    <method name="getName" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
+    <method name="createProvider" 
return="org.apache.hadoop.crypto.key.KeyProvider"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="providerName" type="java.net.URI"/>
+      <param name="conf" type="org.apache.hadoop.conf.Configuration"/>
+      <exception name="IOException" type="java.io.IOException"/>
     </method>
-    <method name="getVersionName" return="java.lang.String"
+    <method name="getProviders" return="java.util.List"
       abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
+      static="true" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="conf" type="org.apache.hadoop.conf.Configuration"/>
+      <exception name="IOException" type="java.io.IOException"/>
     </method>
-    <method name="getMaterial" return="byte[]"
+    <method name="get" return="org.apache.hadoop.crypto.key.KeyProvider"
       abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
+      static="true" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="uri" type="java.net.URI"/>
+      <param name="conf" type="org.apache.hadoop.conf.Configuration"/>
+      <exception name="IOException" type="java.io.IOException"/>
+      <doc>
+      <![CDATA[Create a KeyProvider based on a provided URI.
+
+ @param uri key provider URI
+ @param conf configuration to initialize the key provider
+ @return the key provider for the specified URI, or <code>NULL</code> if
+         a provider for the specified URI scheme could not be found.
+ @throws IOException thrown if the provider failed to initialize.]]>
+      </doc>
     </method>
-    <method name="toString" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
+    <field name="KEY_PROVIDER_PATH" type="java.lang.String"
+      transient="false" volatile="false"
+      static="true" final="true" visibility="public"
       deprecated="not deprecated">
-    </method>
+    </field>
     <doc>
-    <![CDATA[The combination of both the key version name and the key 
material.]]>
+    <![CDATA[A factory to create a list of KeyProvider based on the path given 
in a
+ Configuration. It uses a service loader interface to find the available
+ KeyProviders and create them based on the list of URIs.]]>
     </doc>
   </class>
-  <!-- end class org.apache.hadoop.crypto.key.KeyProvider.KeyVersion -->
-  <!-- start class org.apache.hadoop.crypto.key.KeyProvider.Metadata -->
-  <class name="KeyProvider.Metadata" extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
+  <!-- end class org.apache.hadoop.crypto.key.KeyProviderFactory -->
+</package>
+<package name="org.apache.hadoop.crypto.key.kms">
+</package>
+<package name="org.apache.hadoop.crypto.random">
+</package>
+<package name="org.apache.hadoop.fs">
+  <!-- start class org.apache.hadoop.fs.AbstractFileSystem -->
+  <class name="AbstractFileSystem" extends="java.lang.Object"
+    abstract="true"
+    static="false" final="false" visibility="public"
     deprecated="not deprecated">
-    <constructor name="Metadata" type="java.lang.String, int, 
java.lang.String, java.util.Map, java.util.Date, int"
-      static="false" final="false" visibility="protected"
-      deprecated="not deprecated">
-    </constructor>
-    <constructor name="Metadata" type="byte[]"
-      static="false" final="false" visibility="protected"
+    <constructor name="AbstractFileSystem" type="java.net.URI, 
java.lang.String, boolean, int"
+      static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <exception name="IOException" type="java.io.IOException"/>
+      <exception name="URISyntaxException" type="java.net.URISyntaxException"/>
       <doc>
-      <![CDATA[Deserialize a new metadata object from a set of bytes.
- @param bytes the serialized metadata
- @throws IOException]]>
+      <![CDATA[Constructor to be called by subclasses.
+
+ @param uri for this file system.
+ @param supportedScheme the scheme supported by the implementor
+ @param authorityNeeded if true then theURI must have authority, if false
+          then the URI must have null authority.
+
+ @throws URISyntaxException <code>uri</code> has syntax error]]>
       </doc>
     </constructor>
-    <method name="toString" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getDescription" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getCreated" return="java.util.Date"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getCipher" return="java.lang.String"
+    <method name="getStatistics" 
return="org.apache.hadoop.fs.FileSystem.Statistics"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
     </method>
-    <method name="getAttributes" return="java.util.Map"
+    <method name="isValidName" return="boolean"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="src" type="java.lang.String"/>
+      <doc>
+      <![CDATA[Returns true if the specified string is considered valid in the 
path part
+ of a URI by this file system.  The default implementation enforces the rules
+ of HDFS, but subclasses may override this method to implement specific
+ validation rules for specific file systems.
+
+ @param src String source filename to check, path part of the URI
+ @return boolean true if the specified string is considered valid]]>
+      </doc>
     </method>
-    <method name="getAlgorithm" return="java.lang.String"
+    <method name="createFileSystem" 
return="org.apache.hadoop.fs.AbstractFileSystem"
       abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
+      static="true" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="uri" type="java.net.URI"/>
+      <param name="conf" type="org.apache.hadoop.conf.Configuration"/>
+      <exception name="UnsupportedFileSystemException" 
type="org.apache.hadoop.fs.UnsupportedFileSystemException"/>
       <doc>
-      <![CDATA[Get the algorithm from the cipher.
- @return the algorithm name]]>
+      <![CDATA[Create a file system instance for the specified uri using the 
conf. The
+ conf is used to find the class name that implements the file system. The
+ conf is also passed to the file system for its configuration.
+
+ @param uri URI of the file system
+ @param conf Configuration for the file system
+
+ @return Returns the file system for the given URI
+
+ @throws UnsupportedFileSystemException file system for <code>uri</code> is
+           not found]]>
       </doc>
     </method>
-    <method name="getBitLength" return="int"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </method>
-    <method name="getVersions" return="int"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
+    <method name="getStatistics" 
return="org.apache.hadoop.fs.FileSystem.Statistics"
+      abstract="false" native="false" synchronized="true"
+      static="true" final="false" visibility="protected"
       deprecated="not deprecated">
+      <param name="uri" type="java.net.URI"/>
+      <doc>
+      <![CDATA[Get the statistics for a particular file system.
+
+ @param uri
+          used as key to lookup STATISTICS_TABLE. Only scheme and authority
+          part of the uri are used.
+ @return a statistics object]]>
+      </doc>
     </method>
-    <method name="addVersion" return="int"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="protected"
+    <method name="clearStatistics"
+      abstract="false" native="false" synchronized="true"
+      static="true" final="false" visibility="public"
       deprecated="not deprecated">
     </method>
-    <method name="serialize" return="byte[]"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="protected"
+    <method name="printStatistics"
+      abstract="false" native="false" synchronized="true"
+      static="true" final="false" visibility="public"
       deprecated="not deprecated">
-      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[Serialize the metadata to a set of bytes.
- @return the serialized bytes
- @throws IOException]]>
+      <![CDATA[Prints statistics for all file systems.]]>
       </doc>
     </method>
-    <doc>
-    <![CDATA[Key metadata that is associated with the key.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.crypto.key.KeyProvider.Metadata -->
-  <!-- start class org.apache.hadoop.crypto.key.KeyProvider.Options -->
-  <class name="KeyProvider.Options" extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="Options" type="org.apache.hadoop.conf.Configuration"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="setCipher" 
return="org.apache.hadoop.crypto.key.KeyProvider.Options"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
+    <method name="getAllStatistics" return="java.util.Map"
+      abstract="false" native="false" synchronized="true"
+      static="true" final="false" visibility="protected"
       deprecated="not deprecated">
-      <param name="cipher" type="java.lang.String"/>
     </method>
-    <method name="setBitLength" 
return="org.apache.hadoop.crypto.key.KeyProvider.Options"
+    <method name="get" return="org.apache.hadoop.fs.AbstractFileSystem"
       abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
+      static="true" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="bitLength" type="int"/>
+      <param name="uri" type="java.net.URI"/>
+      <param name="conf" type="org.apache.hadoop.conf.Configuration"/>
+      <exception name="UnsupportedFileSystemException" 
type="org.apache.hadoop.fs.UnsupportedFileSystemException"/>
+      <doc>
+      <![CDATA[The main factory method for creating a file system. Get a file 
system for
+ the URI's scheme and authority. The scheme of the <code>uri</code>
+ determines a configuration property name,
+ <tt>fs.AbstractFileSystem.<i>scheme</i>.impl</tt> whose value names the
+ AbstractFileSystem class.
+
+ The entire URI and conf is passed to the AbstractFileSystem factory method.
+
+ @param uri for the file system to be created.
+ @param conf which is passed to the file system impl.
+
+ @return file system for the given URI.
+
+ @throws UnsupportedFileSystemException if the file system for
+           <code>uri</code> is not supported.]]>
+      </doc>
     </method>
-    <method name="setDescription" 
return="org.apache.hadoop.crypto.key.KeyProvider.Options"
+    <method name="checkScheme"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="description" type="java.lang.String"/>
+      <param name="uri" type="java.net.URI"/>
+      <param name="supportedScheme" type="java.lang.String"/>
+      <doc>
+      <![CDATA[Check that the Uri's scheme matches
+ @param uri
+ @param supportedScheme]]>
+      </doc>
     </method>
-    <method name="setAttributes" 
return="org.apache.hadoop.crypto.key.KeyProvider.Options"
-      abstract="false" native="false" synchronized="false"
+    <method name="getUriDefaultPort" return="int"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="attributes" type="java.util.Map"/>
+      <doc>
+      <![CDATA[The default port of this file system.
+
+ @return default port of this file system's Uri scheme
+         A uri with a port of -1 => default port;]]>
+      </doc>
     </method>
-    <method name="getCipher" return="java.lang.String"
+    <method name="getUri" return="java.net.URI"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <doc>
+      <![CDATA[Returns a URI whose scheme and authority identify this 
FileSystem.
+
+ @return the uri of this file system.]]>
+      </doc>
     </method>
-    <method name="getBitLength" return="int"
+    <method name="checkPath"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="path" type="org.apache.hadoop.fs.Path"/>
+      <doc>
+      <![CDATA[Check that a Path belongs to this FileSystem.
+
+ If the path is fully qualified URI, then its scheme and authority
+ matches that of this file system. Otherwise the path must be
+ slash-relative name.
+
+ @throws InvalidPathException if the path is invalid]]>
+      </doc>
     </method>
-    <method name="getDescription" return="java.lang.String"
+    <method name="getUriPath" return="java.lang.String"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="p" type="org.apache.hadoop.fs.Path"/>
+      <doc>
+      <![CDATA[Get the path-part of a pathname. Checks that URI matches this 
file system
+ and that the path-part is a valid name.
+
+ @param p path
+
+ @return path-part of the Path p]]>
+      </doc>
     </method>
-    <method name="getAttributes" return="java.util.Map"
+    <method name="makeQualified" return="org.apache.hadoop.fs.Path"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="path" type="org.apache.hadoop.fs.Path"/>
+      <doc>
+      <![CDATA[Make the path fully qualified relative to this file system.
+ @param path the path to qualify
+ @return the qualified path]]>
+      </doc>
     </method>
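
A small illustration of the path helpers documented above (checkPath,
getUriPath, makeQualified), again using the local file system; the
/tmp/example path is only a placeholder:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.AbstractFileSystem;
    import org.apache.hadoop.fs.Path;

    public class PathHelperSketch {
      public static void main(String[] args) throws Exception {
        AbstractFileSystem localFs =
            AbstractFileSystem.get(URI.create("file:///"), new Configuration());

        // A slash-relative name passes checkPath and is qualified against
        // this file system's scheme and authority.
        Path qualified = localFs.makeQualified(new Path("/tmp/example"));
        System.out.println(qualified);

        // getUriPath checks the path belongs to this file system and
        // returns only its path part.
        System.out.println(localFs.getUriPath(qualified));

        // A path with a foreign scheme would make checkPath throw
        // InvalidPathException.
      }
    }
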
-    <method name="toString" return="java.lang.String"
+    <method name="getInitialWorkingDirectory" 
return="org.apache.hadoop.fs.Path"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-    </method>
-    <doc>
-    <![CDATA[Options when creating key objects.]]>
-    </doc>
-  </class>
-  <!-- end class org.apache.hadoop.crypto.key.KeyProvider.Options -->
-  <!-- start interface 
org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension -->
-  <interface name="KeyProviderCryptoExtension.CryptoExtension"    
abstract="true"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <implements 
name="org.apache.hadoop.crypto.key.KeyProviderExtension.Extension"/>
-    <method name="warmUpEncryptedKeys"
-      abstract="true" native="false" synchronized="false"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-      <param name="keyNames" type="java.lang.String[]"/>
-      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[Calls to this method allows the underlying KeyProvider to 
warm-up any
- implementation specific caches used to store the Encrypted Keys.
- @param keyNames Array of Key Names]]>
+      <![CDATA[Some file systems like LocalFileSystem have an initial 
workingDir
+ that is used as the starting workingDir. For other file systems
+ like HDFS there is no built-in notion of an initial workingDir.
+
+ @return the initial workingDir if the file system has such a notion;
+         otherwise null.]]>
       </doc>
     </method>
-    <method name="drain"
-      abstract="true" native="false" synchronized="false"
+    <method name="getHomeDirectory" return="org.apache.hadoop.fs.Path"
+      abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="keyName" type="java.lang.String"/>
       <doc>
-      <![CDATA[Drains the Queue for the provided key.
+      <![CDATA[Return the current user's home directory in this file system.
+ The default implementation returns "/user/$USER/".
 
- @param keyName the key to drain the Queue for]]>
+ @return current user's home directory.]]>
       </doc>
     </method>
-    <method name="generateEncryptedKey" 
return="org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion"
+    <method name="getServerDefaults" 
return="org.apache.hadoop.fs.FsServerDefaults"
       abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="encryptionKeyName" type="java.lang.String"/>
       <exception name="IOException" type="java.io.IOException"/>
-      <exception name="GeneralSecurityException" 
type="java.security.GeneralSecurityException"/>
       <doc>
-      <![CDATA[Generates a key material and encrypts it using the given key 
version name
- and initialization vector. The generated key material is of the same
- length as the <code>KeyVersion</code> material of the latest key version
- of the key and is encrypted using the same cipher.
- <p/>
- NOTE: The generated key is not stored by the <code>KeyProvider</code>
+      <![CDATA[Return a set of server default configuration values.
 
- @param encryptionKeyName
-          The latest KeyVersion of this key's material will be encrypted.
- @return EncryptedKeyVersion with the generated key material, the version
-         name is 'EEK' (for Encrypted Encryption Key)
- @throws IOException
-           thrown if the key material could not be generated
- @throws GeneralSecurityException
-           thrown if the key material could not be encrypted because of a
-           cryptographic issue.]]>
+ @return server default configuration values
+
+ @throws IOException if an I/O error occurs]]>
       </doc>
     </method>
-    <method name="decryptEncryptedKey" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="true" native="false" synchronized="false"
+    <method name="resolvePath" return="org.apache.hadoop.fs.Path"
+      abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="encryptedKeyVersion" 
type="org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion"/>
+      <param name="p" type="org.apache.hadoop.fs.Path"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
       <exception name="IOException" type="java.io.IOException"/>
-      <exception name="GeneralSecurityException" 
type="java.security.GeneralSecurityException"/>
       <doc>
-      <![CDATA[Decrypts an encrypted byte[] key material using the given a key 
version
- name and initialization vector.
-
- @param encryptedKeyVersion
-          contains keyVersionName and IV to decrypt the encrypted key
-          material
- @return a KeyVersion with the decrypted key material, the version name is
-         'EK' (For Encryption Key)
- @throws IOException
-           thrown if the key material could not be decrypted
- @throws GeneralSecurityException
-           thrown if the key material could not be decrypted because of a
-           cryptographic issue.]]>
+      <![CDATA[Return the fully-qualified path of path p, resolving the path
+ through any internal symlinks or mount points.
+ @param p path to be resolved
+ @return fully qualified path
+ @throws FileNotFoundException, AccessControlException, IOException
+ @throws UnresolvedLinkException if a symbolic link on the path cannot be
+         resolved internally]]>
       </doc>
     </method>
-    <doc>
-    <![CDATA[CryptoExtension is a type of Extension that exposes methods to 
generate
- EncryptedKeys and to decrypt the same.]]>
-    </doc>
-  </interface>
-  <!-- end interface 
org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension -->
-  <!-- start class 
org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion -->
-  <class name="KeyProviderCryptoExtension.EncryptedKeyVersion" 
extends="java.lang.Object"
-    abstract="false"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="EncryptedKeyVersion" type="java.lang.String, 
java.lang.String, byte[], org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      static="false" final="false" visibility="protected"
+    <method name="create" return="org.apache.hadoop.fs.FSDataOutputStream"
+      abstract="false" native="false" synchronized="false"
+      static="false" final="true" visibility="public"
       deprecated="not deprecated">
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="createFlag" type="java.util.EnumSet"/>
+      <param name="opts" type="org.apache.hadoop.fs.Options.CreateOpts[]"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileAlreadyExistsException" 
type="org.apache.hadoop.fs.FileAlreadyExistsException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="ParentNotDirectoryException" 
type="org.apache.hadoop.fs.ParentNotDirectoryException"/>
+      <exception name="UnsupportedFileSystemException" 
type="org.apache.hadoop.fs.UnsupportedFileSystemException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[Create a new EncryptedKeyVersion.
-
- @param keyName                  Name of the encryption key used to
-                                 encrypt the encrypted key.
- @param encryptionKeyVersionName Version name of the encryption key used
-                                 to encrypt the encrypted key.
- @param encryptedKeyIv           Initialization vector of the encrypted
-                                 key. The IV of the encryption key used to
-                                 encrypt the encrypted key is derived from
-                                 this IV.
- @param encryptedKeyVersion      The encrypted encryption key version.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
+ that the Path f must be fully qualified and the permission is absolute
+ (i.e. umask has been applied).]]>
       </doc>
-    </constructor>
-    <method name="createForDecryption" 
return="org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion"
-      abstract="false" native="false" synchronized="false"
-      static="true" final="false" visibility="public"
+    </method>
+    <method name="createInternal" 
return="org.apache.hadoop.fs.FSDataOutputStream"
+      abstract="true" native="false" synchronized="false"
+      static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="keyName" type="java.lang.String"/>
-      <param name="encryptionKeyVersionName" type="java.lang.String"/>
-      <param name="encryptedKeyIv" type="byte[]"/>
-      <param name="encryptedKeyMaterial" type="byte[]"/>
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="flag" type="java.util.EnumSet"/>
+      <param name="absolutePermission" 
type="org.apache.hadoop.fs.permission.FsPermission"/>
+      <param name="bufferSize" type="int"/>
+      <param name="replication" type="short"/>
+      <param name="blockSize" type="long"/>
+      <param name="progress" type="org.apache.hadoop.util.Progressable"/>
+      <param name="checksumOpt" 
type="org.apache.hadoop.fs.Options.ChecksumOpt"/>
+      <param name="createParent" type="boolean"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileAlreadyExistsException" 
type="org.apache.hadoop.fs.FileAlreadyExistsException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="ParentNotDirectoryException" 
type="org.apache.hadoop.fs.ParentNotDirectoryException"/>
+      <exception name="UnsupportedFileSystemException" 
type="org.apache.hadoop.fs.UnsupportedFileSystemException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[Factory method to create a new EncryptedKeyVersion that can 
then be
- passed into {@link #decryptEncryptedKey}. Note that the fields of the
- returned EncryptedKeyVersion will only partially be populated; it is not
- necessarily suitable for operations besides decryption.
-
- @param keyName Key name of the encryption key use to encrypt the
-                encrypted key.
- @param encryptionKeyVersionName Version name of the encryption key used
-                                 to encrypt the encrypted key.
- @param encryptedKeyIv           Initialization vector of the encrypted
-                                 key. The IV of the encryption key used to
-                                 encrypt the encrypted key is derived from
-                                 this IV.
- @param encryptedKeyMaterial     Key material of the encrypted key.
- @return EncryptedKeyVersion suitable for decryption.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
+ have been declared explicitly.]]>
       </doc>
     </method>
-    <method name="getEncryptionKeyName" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
+    <method name="mkdir"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="dir" type="org.apache.hadoop.fs.Path"/>
+      <param name="permission" 
type="org.apache.hadoop.fs.permission.FsPermission"/>
+      <param name="createParent" type="boolean"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileAlreadyExistsException" 
type="org.apache.hadoop.fs.FileAlreadyExistsException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[@return Name of the encryption key used to encrypt the 
encrypted key.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
+ f must be fully qualified and the permission is absolute (i.e.
+ umask has been applied).]]>
       </doc>
     </method>
-    <method name="getEncryptionKeyVersionName" return="java.lang.String"
-      abstract="false" native="false" synchronized="false"
+    <method name="delete" return="boolean"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="recursive" type="boolean"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[@return Version name of the encryption key used to encrypt the 
encrypted
- key.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#delete(Path, boolean)} except that Path f must be for
+ this file system.]]>
       </doc>
     </method>
-    <method name="getEncryptedKeyIv" return="byte[]"
+    <method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[@return Initialization vector of the encrypted key. The IV of 
the
- encryption key used to encrypt the encrypted key is derived from this
- IV.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#open(Path)} except that Path f must be for this
+ file system.]]>
       </doc>
     </method>
-    <method name="getEncryptedKeyVersion" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
+    <method name="open" return="org.apache.hadoop.fs.FSDataInputStream"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="bufferSize" type="int"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[@return The encrypted encryption key version.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#open(Path, int)} except that Path f must be for this
+ file system.]]>
       </doc>
     </method>
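
The create/open/delete entry points above are normally reached through
FileContext, which qualifies the path and applies the umask before
delegating to this class. A rough usage sketch with a made-up /tmp path:

    import java.util.EnumSet;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class FileContextSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext(new Configuration());
        Path p = new Path("/tmp/filecontext-sketch.txt");

        // Create and write, then read back and delete.
        try (FSDataOutputStream out =
                 fc.create(p, EnumSet.of(CreateFlag.CREATE,
                                         CreateFlag.OVERWRITE))) {
          out.writeUTF("hello");
        }
        try (FSDataInputStream in = fc.open(p)) {
          System.out.println(in.readUTF());
        }
        fc.delete(p, false);
      }
    }
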
-    <method name="deriveIV" return="byte[]"
+    <method name="truncate" return="boolean"
       abstract="false" native="false" synchronized="false"
-      static="true" final="false" visibility="protected"
+      static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="encryptedKeyIV" type="byte[]"/>
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="newLength" type="long"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[Derive the initialization vector (IV) for the encryption key 
from the IV
- of the encrypted key. This derived IV is used with the encryption key to
- decrypt the encrypted key.
- <p/>
- The alternative to this is using the same IV for both the encryption key
- and the encrypted key. Even a simple symmetric transformation like this
- improves security by avoiding IV re-use. IVs will also be fairly unique
- among different EEKs.
-
- @param encryptedKeyIV of the encrypted key (i.e. {@link
- #getEncryptedKeyIv()})
- @return IV for the encryption key]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#truncate(Path, long)} except that Path f must be for
+ this file system.]]>
       </doc>
     </method>
-    <doc>
-    <![CDATA[An encrypted encryption key (EEK) and related information. An EEK 
must be
- decrypted using the key's encryption key before it can be used.]]>
-    </doc>
-  </class>
-  <!-- end class 
org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion -->
-  <!-- start class 
org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension -->
-  <class name="KeyProviderDelegationTokenExtension" 
extends="org.apache.hadoop.crypto.key.KeyProviderExtension"
-    abstract="false"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <method name="addDelegationTokens" 
return="org.apache.hadoop.security.token.Token[]"
-      abstract="false" native="false" synchronized="false"
+    <method name="setReplication" return="boolean"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="renewer" type="java.lang.String"/>
-      <param name="credentials" type="org.apache.hadoop.security.Credentials"/>
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="replication" type="short"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
       <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[Passes the renewer and Credentials object to the underlying
- {@link DelegationTokenExtension}
- @param renewer the user allowed to renew the delegation tokens
- @param credentials cache in which to add new delegation tokens
- @return list of new delegation tokens
- @throws IOException thrown if IOException if an IO error occurs.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#setReplication(Path, short)} except that Path f must be
+ for this file system.]]>
       </doc>
     </method>
-    <method name="createKeyProviderDelegationTokenExtension" 
return="org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension"
+    <method name="rename"
       abstract="false" native="false" synchronized="false"
-      static="true" final="false" visibility="public"
+      static="false" final="true" visibility="public"
       deprecated="not deprecated">
-      <param name="keyProvider" 
type="org.apache.hadoop.crypto.key.KeyProvider"/>
+      <param name="src" type="org.apache.hadoop.fs.Path"/>
+      <param name="dst" type="org.apache.hadoop.fs.Path"/>
+      <param name="options" type="org.apache.hadoop.fs.Options.Rename[]"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileAlreadyExistsException" 
type="org.apache.hadoop.fs.FileAlreadyExistsException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="ParentNotDirectoryException" 
type="org.apache.hadoop.fs.ParentNotDirectoryException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[Creates a <code>KeyProviderDelegationTokenExtension</code> 
using a given
- {@link KeyProvider}.
- <p/>
- If the given <code>KeyProvider</code> implements the
- {@link DelegationTokenExtension} interface the <code>KeyProvider</code>
- itself will provide the extension functionality, otherwise a default
- extension implementation will be used.
-
- @param keyProvider <code>KeyProvider</code> to use to create the
- <code>KeyProviderDelegationTokenExtension</code> extension.
- @return a <code>KeyProviderDelegationTokenExtension</code> instance
- using the given <code>KeyProvider</code>.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#rename(Path, Path, Options.Rename...)} except that the
+ source and destination paths must be for this file system.]]>
       </doc>
     </method>
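
A caller-side sketch of the rename behaviour described here, again through
FileContext and with placeholder /tmp paths; passing Options.Rename.OVERWRITE
selects the overwriting rename, otherwise an existing destination is an error:

    import java.util.EnumSet;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options;
    import org.apache.hadoop.fs.Path;

    public class RenameSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext(new Configuration());
        Path src = new Path("/tmp/rename-src");
        Path dst = new Path("/tmp/rename-dst");

        fc.create(src, EnumSet.of(CreateFlag.CREATE,
                                  CreateFlag.OVERWRITE)).close();
        fc.create(dst, EnumSet.of(CreateFlag.CREATE,
                                  CreateFlag.OVERWRITE)).close();

        // dst already exists, so this rename only succeeds because
        // OVERWRITE is requested.
        fc.rename(src, dst, Options.Rename.OVERWRITE);

        fc.delete(dst, false);
      }
    }
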
-    <doc>
-    <![CDATA[A KeyProvider extension with the ability to add a renewer's 
Delegation
- Tokens to the provided Credentials.]]>
-    </doc>
-  </class>
-  <!-- end class 
org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension -->
-  <!-- start interface 
org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension.DelegationTokenExtension
 -->
-  <interface 
name="KeyProviderDelegationTokenExtension.DelegationTokenExtension"    
abstract="true"
-    static="true" final="false" visibility="public"
-    deprecated="not deprecated">
-    <implements 
name="org.apache.hadoop.crypto.key.KeyProviderExtension.Extension"/>
-    <method name="addDelegationTokens" 
return="org.apache.hadoop.security.token.Token[]"
+    <method name="renameInternal"
       abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="renewer" type="java.lang.String"/>
-      <param name="credentials" type="org.apache.hadoop.security.Credentials"/>
+      <param name="src" type="org.apache.hadoop.fs.Path"/>
+      <param name="dst" type="org.apache.hadoop.fs.Path"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileAlreadyExistsException" 
type="org.apache.hadoop.fs.FileAlreadyExistsException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="ParentNotDirectoryException" 
type="org.apache.hadoop.fs.ParentNotDirectoryException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
       <exception name="IOException" type="java.io.IOException"/>
       <doc>
-      <![CDATA[The implementer of this class will take a renewer and add all
- delegation tokens associated with the renewer to the
- <code>Credentials</code> object if it is not already present,
- @param renewer the user allowed to renew the delegation tokens
- @param credentials cache in which to add new delegation tokens
- @return list of new delegation tokens
- @throws IOException thrown if IOException if an IO error occurs.]]>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#rename(Path, Path, Options.Rename...)} except that the
+ source and destination paths must be for this file system and NO OVERWRITE
+ is performed.
+
+ File systems that do not have built-in overwrite support need to implement
+ only this method and can take advantage of the default implementation of
+ the other {@link #renameInternal(Path, Path, boolean)}.]]>
       </doc>
     </method>
-    <doc>
-    <![CDATA[DelegationTokenExtension is a type of Extension that exposes 
methods to
- needed to work with Delegation Tokens.]]>
-    </doc>
-  </interface>
-  <!-- end interface 
org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension.DelegationTokenExtension
 -->
-  <!-- start class org.apache.hadoop.crypto.key.KeyProviderExtension -->
-  <class name="KeyProviderExtension" 
extends="org.apache.hadoop.crypto.key.KeyProvider"
-    abstract="true"
-    static="false" final="false" visibility="public"
-    deprecated="not deprecated">
-    <constructor name="KeyProviderExtension" 
type="org.apache.hadoop.crypto.key.KeyProvider, E"
-      static="false" final="false" visibility="public"
-      deprecated="not deprecated">
-    </constructor>
-    <method name="getExtension" return="E"
-      abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="protected"
-      deprecated="not deprecated">
-    </method>
-    <method name="getKeyProvider" 
return="org.apache.hadoop.crypto.key.KeyProvider"
+    <method name="renameInternal"
       abstract="false" native="false" synchronized="false"
-      static="false" final="false" visibility="protected"
+      static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <param name="src" type="org.apache.hadoop.fs.Path"/>
+      <param name="dst" type="org.apache.hadoop.fs.Path"/>
+      <param name="overwrite" type="boolean"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileAlreadyExistsException" 
type="org.apache.hadoop.fs.FileAlreadyExistsException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="ParentNotDirectoryException" 
type="org.apache.hadoop.fs.ParentNotDirectoryException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <exception name="IOException" type="java.io.IOException"/>
+      <doc>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#rename(Path, Path, Options.Rename...)} except that the
+ source and destination paths must be for this file system.]]>
+      </doc>
     </method>
-    <method name="isTransient" return="boolean"
+    <method name="supportsSymlinks" return="boolean"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
+      <doc>
+      <![CDATA[Returns true if the file system supports symlinks, false 
otherwise.
+ @return true if filesystem supports symlinks]]>
+      </doc>
     </method>
-    <method name="getKeysMetadata" 
return="org.apache.hadoop.crypto.key.KeyProvider.Metadata[]"
+    <method name="createSymlink"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="names" type="java.lang.String[]"/>
+      <param name="target" type="org.apache.hadoop.fs.Path"/>
+      <param name="link" type="org.apache.hadoop.fs.Path"/>
+      <param name="createParent" type="boolean"/>
       <exception name="IOException" type="java.io.IOException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
+      <doc>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#createSymlink(Path, Path, boolean)}.]]>
+      </doc>
     </method>
-    <method name="getCurrentKey" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
+    <method name="getLinkTarget" return="org.apache.hadoop.fs.Path"
       abstract="false" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
       <exception name="IOException" type="java.io.IOException"/>
+      <doc>
+      <![CDATA[Partially resolves the path. This is used during symlink 
resolution in
+ {@link FSLinkResolver}, and differs from the similarly named method
+ {@link FileContext#getLinkTarget(Path)}.
+ @throws IOException subclass implementations may throw an IOException]]>
+      </doc>
     </method>
-    <method name="createKey" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
+    <method name="setPermission"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
-      <param name="options" 
type="org.apache.hadoop.crypto.key.KeyProvider.Options"/>
-      <exception name="NoSuchAlgorithmException" 
type="java.security.NoSuchAlgorithmException"/>
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="permission" 
type="org.apache.hadoop.fs.permission.FsPermission"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
       <exception name="IOException" type="java.io.IOException"/>
+      <doc>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#setPermission(Path, FsPermission)} except that Path f
+ must be for this file system.]]>
+      </doc>
     </method>
-    <method name="rollNewVersion" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
+    <method name="setOwner"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="name" type="java.lang.String"/>
-      <exception name="NoSuchAlgorithmException" 
type="java.security.NoSuchAlgorithmException"/>
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+      <param name="username" type="java.lang.String"/>
+      <param name="groupname" type="java.lang.String"/>
+      <exception name="AccessControlException" 
type="org.apache.hadoop.security.AccessControlException"/>
+      <exception name="FileNotFoundException" 
type="java.io.FileNotFoundException"/>
+      <exception name="UnresolvedLinkException" 
type="org.apache.hadoop.fs.UnresolvedLinkException"/>
       <exception name="IOException" type="java.io.IOException"/>
+      <doc>
+      <![CDATA[The specification of this method matches that of
+ {@link FileContext#setOwner(Path, String, String)} except that Path f must
+ be for this file system.]]>
+      </doc>
     </method>
-    <method name="getKeyVersion" 
return="org.apache.hadoop.crypto.key.KeyProvider.KeyVersion"
-      abstract="false" native="false" synchronized="false"
+    <method name="setTimes"
+      abstract="true" native="false" synchronized="false"
       static="false" final="false" visibility="public"
       deprecated="not deprecated">
-      <param name="versionName" type="java.lang.String"/>
+      <param name="f" type="org.apache.hadoop.fs.Path"/>
+    

<TRUNCATED>
