HADOOP-11593. Convert site documentation from apt to markdown (stragglers) 
(Masatake Iwasaki via aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b6fc1f3e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b6fc1f3e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b6fc1f3e

Branch: refs/heads/trunk
Commit: b6fc1f3e4355be913b7d4f6ccd48c0c26b66d039
Parents: 7c78204
Author: Allen Wittenauer <a...@apache.org>
Authored: Tue Feb 17 21:30:24 2015 -1000
Committer: Allen Wittenauer <a...@apache.org>
Committed: Tue Feb 17 21:30:24 2015 -1000

----------------------------------------------------------------------
 .../hadoop-auth/src/site/apt/BuildingIt.apt.vm  |   70 --
 .../src/site/apt/Configuration.apt.vm           |  377 -------
 .../hadoop-auth/src/site/apt/Examples.apt.vm    |  133 ---
 .../hadoop-auth/src/site/apt/index.apt.vm       |   59 -
 .../hadoop-auth/src/site/markdown/BuildingIt.md |   56 +
 .../src/site/markdown/Configuration.md          |  341 ++++++
 .../hadoop-auth/src/site/markdown/Examples.md   |  109 ++
 .../hadoop-auth/src/site/markdown/index.md      |   43 +
 hadoop-common-project/hadoop-common/CHANGES.txt |    3 +
 .../hadoop-kms/src/site/apt/index.apt.vm        | 1020 ------------------
 .../hadoop-kms/src/site/markdown/index.md.vm    |  864 +++++++++++++++
 hadoop-project/src/site/apt/index.apt.vm        |   73 --
 hadoop-project/src/site/markdown/index.md.vm    |   72 ++
 .../hadoop-openstack/src/site/apt/index.apt.vm  |  686 ------------
 .../hadoop-openstack/src/site/markdown/index.md |  544 ++++++++++
 .../src/site/resources/css/site.css             |   30 +
 .../src/site/apt/SchedulerLoadSimulator.apt.vm  |  439 --------
 .../src/site/markdown/SchedulerLoadSimulator.md |  357 ++++++
 .../src/site/apt/HadoopStreaming.apt.vm         |  792 --------------
 .../src/site/markdown/HadoopStreaming.md.vm     |  559 ++++++++++
 20 files changed, 2978 insertions(+), 3649 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm 
b/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm
deleted file mode 100644
index 2ca2f0a..0000000
--- a/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm
+++ /dev/null
@@ -1,70 +0,0 @@
-~~ Licensed under the Apache License, Version 2.0 (the "License");
-~~ you may not use this file except in compliance with the License.
-~~ You may obtain a copy of the License at
-~~
-~~   http://www.apache.org/licenses/LICENSE-2.0
-~~
-~~ Unless required by applicable law or agreed to in writing, software
-~~ distributed under the License is distributed on an "AS IS" BASIS,
-~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-~~ See the License for the specific language governing permissions and
-~~ limitations under the License. See accompanying LICENSE file.
-
-  ---
-  Hadoop Auth, Java HTTP SPNEGO ${project.version} - Building It
-  ---
-  ---
-  ${maven.build.timestamp}
-
-Hadoop Auth, Java HTTP SPNEGO ${project.version} - Building It
-
-* Requirements
-
-  * Java 6+
-
-  * Maven 3+
-
-  * Kerberos KDC (for running Kerberos test cases)
-
-* Building
-
-  Use Maven goals: clean, test, compile, package, install
-
-  Available profiles: docs, testKerberos
-
-* Testing
-
-  By default Kerberos testcases are not run.
-
-  The requirements to run Kerberos testcases are a running KDC, a keytab
-  file with a client principal and a kerberos principal.
-
-  To run Kerberos tescases use the <<<testKerberos>>> Maven profile:
-
-+---+
-$ mvn test -PtestKerberos
-+---+
-
-  The following Maven <<<-D>>> options can be used to change the default
-  values:
-
-  * <<<hadoop-auth.test.kerberos.realm>>>: default value <<LOCALHOST>>
-
-  * <<<hadoop-auth.test.kerberos.client.principal>>>: default value <<client>>
-
-  * <<<hadoop-auth.test.kerberos.server.principal>>>: default value
-    <<HTTP/localhost>> (it must start 'HTTP/')
-
-  * <<<hadoop-auth.test.kerberos.keytab.file>>>: default value
-    <<${HOME}/${USER}.keytab>>
-
-** Generating Documentation
-
-  To create the documentation use the <<<docs>>> Maven profile:
-
-+---+
-$ mvn package -Pdocs
-+---+
-
-  The generated documentation is available at
-  <<<hadoop-auth/target/site/>>>.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm 
b/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
deleted file mode 100644
index 88248e5..0000000
--- a/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
+++ /dev/null
@@ -1,377 +0,0 @@
-~~ Licensed under the Apache License, Version 2.0 (the "License");
-~~ you may not use this file except in compliance with the License.
-~~ You may obtain a copy of the License at
-~~
-~~   http://www.apache.org/licenses/LICENSE-2.0
-~~
-~~ Unless required by applicable law or agreed to in writing, software
-~~ distributed under the License is distributed on an "AS IS" BASIS,
-~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-~~ See the License for the specific language governing permissions and
-~~ limitations under the License. See accompanying LICENSE file.
-
-  ---
-  Hadoop Auth, Java HTTP SPNEGO ${project.version} - Server Side
-  Configuration
-  ---
-  ---
-  ${maven.build.timestamp}
-
-Hadoop Auth, Java HTTP SPNEGO ${project.version} - Server Side
-Configuration
-
-* Server Side Configuration Setup
-
-  The AuthenticationFilter filter is Hadoop Auth's server side component.
-
-  This filter must be configured in front of all the web application resources
-  that required authenticated requests. For example:
-
-  The Hadoop Auth and dependent JAR files must be in the web application
-  classpath (commonly the <<<WEB-INF/lib>>> directory).
-
-  Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define
-  the SLF4J API dependency but it does not define the dependency on a concrete
-  logging implementation, this must be addded explicitly to the web
-  application. For example, if the web applicationan uses Log4j, the
-  SLF4J-LOG4J12 and LOG4J jar files must be part part of the web application
-  classpath as well as the Log4j configuration file.
-
-** Common Configuration parameters
-
-  * <<<config.prefix>>>: If specified, all other configuration parameter names
-    must start with the prefix. The default value is no prefix.
-
-  * <<<[PREFIX.]type>>>: the authentication type keyword (<<<simple>>> or
-    <<<kerberos>>>) or a Authentication handler implementation.
-
-  * <<<[PREFIX.]signature.secret>>>: When <<<signer.secret.provider>>> is set 
to
-    <<<string>>> or not specified, this is the value for the secret used to 
sign
-    the HTTP cookie.
-
-  * <<<[PREFIX.]token.validity>>>: The validity -in seconds- of the generated
-    authentication token. The default value is <<<3600>>> seconds. This is also
-    used for the rollover interval when <<<signer.secret.provider>>> is set to
-    <<<random>>> or <<<zookeeper>>>.
-
-  * <<<[PREFIX.]cookie.domain>>>: domain to use for the HTTP cookie that stores
-    the authentication token.
-
-  * <<<[PREFIX.]cookie.path>>>: path to use for the HTTP cookie that stores the
-    authentication token.
-
-  * <<<signer.secret.provider>>>: indicates the name of the 
SignerSecretProvider
-    class to use. Possible values are: <<<string>>>, <<<random>>>,
-    <<<zookeeper>>>, or a classname. If not specified, the <<<string>>>
-    implementation will be used; and failing that, the <<<random>>>
-    implementation will be used.
-
-** Kerberos Configuration
-
-  <<IMPORTANT>>: A KDC must be configured and running.
-
-  To use Kerberos SPNEGO as the authentication mechanism, the authentication
-  filter must be configured with the following init parameters:
-
-    * <<<[PREFIX.]type>>>: the keyword <<<kerberos>>>.
-
-    * <<<[PREFIX.]kerberos.principal>>>: The web-application Kerberos principal
-      name. The Kerberos principal name must start with <<<HTTP/...>>>. For
-      example: <<<HTTP/localhost@LOCALHOST>>>.  There is no default value.
-
-    * <<<[PREFIX.]kerberos.keytab>>>: The path to the keytab file containing
-      the credentials for the kerberos principal. For example:
-      <<</Users/tucu/tucu.keytab>>>. There is no default value.
-
-  <<Example>>:
-
-+---+
-<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee";>
-    ...
-
-    <filter>
-        <filter-name>kerberosFilter</filter-name>
-        
<filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
-        <init-param>
-            <param-name>type</param-name>
-            <param-value>kerberos</param-value>
-        </init-param>
-        <init-param>
-            <param-name>token.validity</param-name>
-            <param-value>30</param-value>
-        </init-param>
-        <init-param>
-            <param-name>cookie.domain</param-name>
-            <param-value>.foo.com</param-value>
-        </init-param>
-        <init-param>
-            <param-name>cookie.path</param-name>
-            <param-value>/</param-value>
-        </init-param>
-        <init-param>
-            <param-name>kerberos.principal</param-name>
-            <param-value>HTTP/localhost@LOCALHOST</param-value>
-        </init-param>
-        <init-param>
-            <param-name>kerberos.keytab</param-name>
-            <param-value>/tmp/auth.keytab</param-value>
-        </init-param>
-    </filter>
-
-    <filter-mapping>
-        <filter-name>kerberosFilter</filter-name>
-        <url-pattern>/kerberos/*</url-pattern>
-    </filter-mapping>
-
-    ...
-</web-app>
-+---+
-
-** Pseudo/Simple Configuration
-
-  To use Pseudo/Simple as the authentication mechanism (trusting the value of
-  the query string parameter 'user.name'), the authentication filter must be
-  configured with the following init parameters:
-
-    * <<<[PREFIX.]type>>>: the keyword <<<simple>>>.
-
-    * <<<[PREFIX.]simple.anonymous.allowed>>>: is a boolean parameter that
-      indicates if anonymous requests are allowed or not. The default value is
-      <<<false>>>.
-
-  <<Example>>:
-
-+---+
-<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee";>
-    ...
-
-    <filter>
-        <filter-name>simpleFilter</filter-name>
-        
<filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
-        <init-param>
-            <param-name>type</param-name>
-            <param-value>simple</param-value>
-        </init-param>
-        <init-param>
-            <param-name>token.validity</param-name>
-            <param-value>30</param-value>
-        </init-param>
-        <init-param>
-            <param-name>cookie.domain</param-name>
-            <param-value>.foo.com</param-value>
-        </init-param>
-        <init-param>
-            <param-name>cookie.path</param-name>
-            <param-value>/</param-value>
-        </init-param>
-        <init-param>
-            <param-name>simple.anonymous.allowed</param-name>
-            <param-value>false</param-value>
-        </init-param>
-    </filter>
-
-    <filter-mapping>
-        <filter-name>simpleFilter</filter-name>
-        <url-pattern>/simple/*</url-pattern>
-    </filter-mapping>
-
-    ...
-</web-app>
-+---+
-
-** AltKerberos Configuration
-
-  <<IMPORTANT>>: A KDC must be configured and running.
-
-  The AltKerberos authentication mechanism is a partially implemented 
derivative
-  of the Kerberos SPNEGO authentication mechanism which allows a "mixed" form 
of
-  authentication where Kerberos SPNEGO is used by non-browsers while an
-  alternate form of authentication (to be implemented by the user) is used for
-  browsers.  To use AltKerberos as the authentication mechanism (besides
-  providing an implementation), the authentication filter must be configured
-  with the following init parameters, in addition to the previously mentioned
-  Kerberos SPNEGO ones:
-
-    * <<<[PREFIX.]type>>>: the full class name of the implementation of
-      AltKerberosAuthenticationHandler to use.
-
-    * <<<[PREFIX.]alt-kerberos.non-browser.user-agents>>>: a comma-separated
-      list of which user-agents should be considered non-browsers.
-
-  <<Example>>:
-
-+---+
-<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee";>
-    ...
-
-    <filter>
-        <filter-name>kerberosFilter</filter-name>
-        
<filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
-        <init-param>
-            <param-name>type</param-name>
-            
<param-value>org.my.subclass.of.AltKerberosAuthenticationHandler</param-value>
-        </init-param>
-        <init-param>
-            <param-name>alt-kerberos.non-browser.user-agents</param-name>
-            <param-value>java,curl,wget,perl</param-value>
-        </init-param>
-        <init-param>
-            <param-name>token.validity</param-name>
-            <param-value>30</param-value>
-        </init-param>
-        <init-param>
-            <param-name>cookie.domain</param-name>
-            <param-value>.foo.com</param-value>
-        </init-param>
-        <init-param>
-            <param-name>cookie.path</param-name>
-            <param-value>/</param-value>
-        </init-param>
-        <init-param>
-            <param-name>kerberos.principal</param-name>
-            <param-value>HTTP/localhost@LOCALHOST</param-value>
-        </init-param>
-        <init-param>
-            <param-name>kerberos.keytab</param-name>
-            <param-value>/tmp/auth.keytab</param-value>
-        </init-param>
-    </filter>
-
-    <filter-mapping>
-        <filter-name>kerberosFilter</filter-name>
-        <url-pattern>/kerberos/*</url-pattern>
-    </filter-mapping>
-
-    ...
-</web-app>
-+---+
-
-** SignerSecretProvider Configuration
-
-  The SignerSecretProvider is used to provide more advanced behaviors for the
-  secret used for signing the HTTP Cookies.
-
-  These are the relevant configuration properties:
-
-    * <<<signer.secret.provider>>>: indicates the name of the
-      SignerSecretProvider class to use. Possible values are: "string",
-      "random", "zookeeper", or a classname. If not specified, the "string"
-      implementation will be used; and failing that, the "random" 
implementation
-      will be used.
-
-    * <<<[PREFIX.]signature.secret>>>: When <<<signer.secret.provider>>> is set
-      to <<<string>>> or not specified, this is the value for the secret used 
to
-      sign the HTTP cookie.
-
-    * <<<[PREFIX.]token.validity>>>: The validity -in seconds- of the generated
-      authentication token. The default value is <<<3600>>> seconds. This is
-      also used for the rollover interval when <<<signer.secret.provider>>> is
-      set to <<<random>>> or <<<zookeeper>>>.
-
-  The following configuration properties are specific to the <<<zookeeper>>>
-  implementation:
-
-    * <<<signer.secret.provider.zookeeper.connection.string>>>: Indicates the
-      ZooKeeper connection string to connect with.
-
-    * <<<signer.secret.provider.zookeeper.path>>>: Indicates the ZooKeeper path
-      to use for storing and retrieving the secrets.  All servers
-      that need to coordinate their secret should point to the same path
-
-    * <<<signer.secret.provider.zookeeper.auth.type>>>: Indicates the auth type
-      to use.  Supported values are <<<none>>> and <<<sasl>>>.  The default
-      value is <<<none>>>.
-
-    * <<<signer.secret.provider.zookeeper.kerberos.keytab>>>: Set this to the
-      path with the Kerberos keytab file.  This is only required if using
-      Kerberos.
-
-    * <<<signer.secret.provider.zookeeper.kerberos.principal>>>: Set this to 
the
-      Kerberos principal to use.  This only required if using Kerberos.
-
-  <<Example>>:
-
-+---+
-<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee";>
-    ...
-
-    <filter>
-        <!-- AuthenticationHandler configs not shown -->
-        <init-param>
-            <param-name>signer.secret.provider</param-name>
-            <param-value>string</param-value>
-        </init-param>
-        <init-param>
-            <param-name>signature.secret</param-name>
-            <param-value>my_secret</param-value>
-        </init-param>
-    </filter>
-
-    ...
-</web-app>
-+---+
-
-  <<Example>>:
-
-+---+
-<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee";>
-    ...
-
-    <filter>
-        <!-- AuthenticationHandler configs not shown -->
-        <init-param>
-            <param-name>signer.secret.provider</param-name>
-            <param-value>random</param-value>
-        </init-param>
-        <init-param>
-            <param-name>token.validity</param-name>
-            <param-value>30</param-value>
-        </init-param>
-    </filter>
-
-    ...
-</web-app>
-+---+
-
-  <<Example>>:
-
-+---+
-<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee";>
-    ...
-
-    <filter>
-        <!-- AuthenticationHandler configs not shown -->
-        <init-param>
-            <param-name>signer.secret.provider</param-name>
-            <param-value>zookeeper</param-value>
-        </init-param>
-        <init-param>
-            <param-name>token.validity</param-name>
-            <param-value>30</param-value>
-        </init-param>
-        <init-param>
-            
<param-name>signer.secret.provider.zookeeper.connection.string</param-name>
-            <param-value>zoo1:2181,zoo2:2181,zoo3:2181</param-value>
-        </init-param>
-        <init-param>
-            <param-name>signer.secret.provider.zookeeper.path</param-name>
-            <param-value>/myapp/secrets</param-value>
-        </init-param>
-        <init-param>
-            
<param-name>signer.secret.provider.zookeeper.use.kerberos.acls</param-name>
-            <param-value>true</param-value>
-        </init-param>
-        <init-param>
-            
<param-name>signer.secret.provider.zookeeper.kerberos.keytab</param-name>
-            <param-value>/tmp/auth.keytab</param-value>
-        </init-param>
-        <init-param>
-            
<param-name>signer.secret.provider.zookeeper.kerberos.principal</param-name>
-            <param-value>HTTP/localhost@LOCALHOST</param-value>
-        </init-param>
-    </filter>
-
-    ...
-</web-app>
-+---+
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm 
b/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm
deleted file mode 100644
index 1b1afd5..0000000
--- a/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm
+++ /dev/null
@@ -1,133 +0,0 @@
-~~ Licensed under the Apache License, Version 2.0 (the "License");
-~~ you may not use this file except in compliance with the License.
-~~ You may obtain a copy of the License at
-~~
-~~   http://www.apache.org/licenses/LICENSE-2.0
-~~
-~~ Unless required by applicable law or agreed to in writing, software
-~~ distributed under the License is distributed on an "AS IS" BASIS,
-~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-~~ See the License for the specific language governing permissions and
-~~ limitations under the License. See accompanying LICENSE file.
-
-  ---
-  Hadoop Auth, Java HTTP SPNEGO ${project.version} - Examples
-  ---
-  ---
-  ${maven.build.timestamp}
-
-Hadoop Auth, Java HTTP SPNEGO ${project.version} - Examples
-
-* Accessing a Hadoop Auth protected URL Using a browser
-
-  <<IMPORTANT:>> The browser must support HTTP Kerberos SPNEGO. For example,
-  Firefox or Internet Explorer.
-
-  For Firefox access the low level configuration page by loading the
-  <<<about:config>>> page. Then go to the
-  <<<network.negotiate-auth.trusted-uris>>> preference and add the hostname or
-  the domain of the web server that is HTTP Kerberos SPNEGO protected (if using
-  multiple domains and hostname use comma to separate them).
-  
-* Accessing a Hadoop Auth protected URL Using <<<curl>>>
-
-  <<IMPORTANT:>> The <<<curl>>> version must support GSS, run <<<curl -V>>>.
-
-+---+
-$ curl -V
-curl 7.19.7 (universal-apple-darwin10.0) libcurl/7.19.7 OpenSSL/0.9.8l 
zlib/1.2.3
-Protocols: tftp ftp telnet dict ldap http file https ftps
-Features: GSS-Negotiate IPv6 Largefile NTLM SSL libz
-+---+
-
-  Login to the KDC using <<kinit>> and then use <<<curl>>> to fetch protected
-  URL:
-
-+---+
-$ kinit
-Please enter the password for tucu@LOCALHOST:
-$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt 
http://localhost:8080/hadoop-auth-examples/kerberos/who
-Enter host password for user 'tucu':
-
-Hello Hadoop Auth Examples!
-+---+
-
-  * The <<<--negotiate>>> option enables SPNEGO in <<<curl>>>.
-
-  * The <<<-u foo>>> option is required but the user ignored (the principal
-    that has been kinit-ed is used).
-
-  * The <<<-b>>> and <<<-c>>> are use to store and send HTTP Cookies.
-
-* Using the Java Client
-
-  Use the <<<AuthenticatedURL>>> class to obtain an authenticated HTTP
-  connection:
-
-+---+
-...
-URL url = new URL("http://localhost:8080/hadoop-auth/kerberos/who";);
-AuthenticatedURL.Token token = new AuthenticatedURL.Token();
-...
-HttpURLConnection conn = new AuthenticatedURL(url, token).openConnection();
-...
-conn = new AuthenticatedURL(url, token).openConnection();
-...
-+---+
-
-* Building and Running the Examples
-
-  Download Hadoop-Auth's source code, the examples are in the
-  <<<src/main/examples>>> directory.
-
-** Server Example:
-
-  Edit the <<<hadoop-auth-examples/src/main/webapp/WEB-INF/web.xml>>> and set 
the
-  right configuration init parameters for the <<<AuthenticationFilter>>>
-  definition configured for Kerberos (the right Kerberos principal and keytab
-  file must be specified). Refer to the {{{./Configuration.html}Configuration
-  document}} for details.
-
-  Create the web application WAR file by running the <<<mvn package>>> command.
-
-  Deploy the WAR file in a servlet container. For example, if using Tomcat,
-  copy the WAR file to Tomcat's <<<webapps/>>> directory.
-
-  Start the servlet container.
-
-** Accessing the server using <<<curl>>>
-
-  Try accessing protected resources using <<<curl>>>. The protected resources
-  are:
-
-+---+
-$ kinit
-Please enter the password for tucu@LOCALHOST:
-
-$ curl http://localhost:8080/hadoop-auth-examples/anonymous/who
-
-$ curl http://localhost:8080/hadoop-auth-examples/simple/who?user.name=foo
-
-$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt 
http://localhost:8080/hadoop-auth-examples/kerberos/who
-+---+
-
-** Accessing the server using the Java client example
-
-+---+
-$ kinit
-Please enter the password for tucu@LOCALHOST:
-
-$ cd examples
-
-$ mvn exec:java -Durl=http://localhost:8080/hadoop-auth-examples/kerberos/who
-
-....
-
-Token value: 
"u=tucu,p=tucu@LOCALHOST,t=kerberos,e=1295305313146,s=sVZ1mpSnC5TKhZQE3QLN5p2DWBo="
-Status code: 200 OK
-
-You are: user[tucu] principal[tucu@LOCALHOST]
-
-....
-
-+---+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm 
b/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm
deleted file mode 100644
index bf85f7f..0000000
--- a/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm
+++ /dev/null
@@ -1,59 +0,0 @@
-~~ Licensed under the Apache License, Version 2.0 (the "License");
-~~ you may not use this file except in compliance with the License.
-~~ You may obtain a copy of the License at
-~~
-~~   http://www.apache.org/licenses/LICENSE-2.0
-~~
-~~ Unless required by applicable law or agreed to in writing, software
-~~ distributed under the License is distributed on an "AS IS" BASIS,
-~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-~~ See the License for the specific language governing permissions and
-~~ limitations under the License. See accompanying LICENSE file.
-
-  ---
-  Hadoop Auth, Java HTTP SPNEGO ${project.version}
-  ---
-  ---
-  ${maven.build.timestamp}
-
-Hadoop Auth, Java HTTP SPNEGO ${project.version}
-
-  Hadoop Auth is a Java library consisting of a client and a server
-  components to enable Kerberos SPNEGO authentication for HTTP.
-
-  Hadoop Auth also supports additional authentication mechanisms on the client
-  and the server side via 2 simple interfaces.
-
-  Additionally, it provides a partially implemented derivative of the Kerberos
-  SPNEGO authentication to allow a "mixed" form of authentication where 
Kerberos
-  SPNEGO is used by non-browsers while an alternate form of authentication
-  (to be implemented by the user) is used for browsers.
-
-* License
-
-  Hadoop Auth is distributed under {{{http://www.apache.org/licenses/}Apache
-  License 2.0}}.
-
-* How Does Auth Works?
-
-  Hadoop Auth enforces authentication on protected resources, once 
authentiation
-  has been established it sets a signed HTTP Cookie that contains an
-  authentication token with the user name, user principal, authentication type
-  and expiration time.
-
-  Subsequent HTTP client requests presenting the signed HTTP Cookie have access
-  to the protected resources until the HTTP Cookie expires.
-
-  The secret used to sign the HTTP Cookie has multiple implementations that
-  provide different behaviors, including a hardcoded secret string, a rolling
-  randomly generated secret, and a rolling randomly generated secret
-  synchronized between multiple servers using ZooKeeper.
-
-* User Documentation
-
-  * {{{./Examples.html}Examples}}
-
-  * {{{./Configuration.html}Configuration}}
-
-  * {{{./BuildingIt.html}Building It}}
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/markdown/BuildingIt.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/site/markdown/BuildingIt.md 
b/hadoop-common-project/hadoop-auth/src/site/markdown/BuildingIt.md
new file mode 100644
index 0000000..53a49d4
--- /dev/null
+++ b/hadoop-common-project/hadoop-auth/src/site/markdown/BuildingIt.md
@@ -0,0 +1,56 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+Hadoop Auth, Java HTTP SPNEGO - Building It
+===========================================
+
+Requirements
+------------
+
+* Java 6+
+* Maven 3+
+* Kerberos KDC (for running Kerberos test cases)
+
+Building
+--------
+
+Use Maven goals: clean, test, compile, package, install
+
+Available profiles: docs, testKerberos
+
+Testing
+-------
+
+By default Kerberos testcases are not run.
+
+The requirements to run Kerberos testcases are a running KDC, a keytab file 
with a client principal and a kerberos principal.
+
+To run Kerberos tescases use the `testKerberos` Maven profile:
+
+    $ mvn test -PtestKerberos
+
+The following Maven `-D` options can be used to change the default values:
+
+* `hadoop-auth.test.kerberos.realm`: default value **LOCALHOST**
+* `hadoop-auth.test.kerberos.client.principal`: default value **client**
+* `hadoop-auth.test.kerberos.server.principal`: default value 
**HTTP/localhost** (it must start 'HTTP/')
+* `hadoop-auth.test.kerberos.keytab.file`: default value **$HOME/$USER.keytab**
+
+### Generating Documentation
+
+To create the documentation use the `docs` Maven profile:
+
+    $ mvn package -Pdocs
+
+The generated documentation is available at `hadoop-auth/target/site/`.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/markdown/Configuration.md
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-auth/src/site/markdown/Configuration.md 
b/hadoop-common-project/hadoop-auth/src/site/markdown/Configuration.md
new file mode 100644
index 0000000..9d076bb
--- /dev/null
+++ b/hadoop-common-project/hadoop-auth/src/site/markdown/Configuration.md
@@ -0,0 +1,341 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+Hadoop Auth, Java HTTP SPNEGO - Server Side Configuration
+=========================================================
+
+Server Side Configuration Setup
+-------------------------------
+
+The AuthenticationFilter filter is Hadoop Auth's server side component.
+
+This filter must be configured in front of all the web application resources 
that required authenticated requests. For example:
+
+The Hadoop Auth and dependent JAR files must be in the web application 
classpath (commonly the `WEB-INF/lib` directory).
+
+Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the 
SLF4J API dependency but it does not define the dependency on a concrete 
logging implementation; this must be added explicitly to the web application. 
For example, if the web application uses Log4j, the SLF4J-LOG4J12 and LOG4J 
jar files must be part of the web application classpath as well as the 
Log4j configuration file.
+
+### Common Configuration parameters
+
+*   `config.prefix`: If specified, all other configuration parameter names
+    must start with the prefix. The default value is no prefix.
+
+*   `[PREFIX.]type`: the authentication type keyword (`simple` or \
+    `kerberos`) or a Authentication handler implementation.
+
+*   `[PREFIX.]signature.secret`: When `signer.secret.provider` is set to
+    `string` or not specified, this is the value for the secret used to sign
+    the HTTP cookie.
+
+*   `[PREFIX.]token.validity`: The validity -in seconds- of the generated
+    authentication token. The default value is `3600` seconds. This is also
+    used for the rollover interval when `signer.secret.provider` is set to
+    `random` or `zookeeper`.
+
+*   `[PREFIX.]cookie.domain`: domain to use for the HTTP cookie that stores
+    the authentication token.
+
+*   `[PREFIX.]cookie.path`: path to use for the HTTP cookie that stores the
+    authentication token.
+
+*   `signer.secret.provider`: indicates the name of the SignerSecretProvider
+    class to use. Possible values are: `string`, `random`,
+    `zookeeper`, or a classname. If not specified, the `string`
+    implementation will be used; and failing that, the `random`
+    implementation will be used.
+
+### Kerberos Configuration
+
+**IMPORTANT**: A KDC must be configured and running.
+
+To use Kerberos SPNEGO as the authentication mechanism, the authentication 
filter must be configured with the following init parameters:
+
+*   `[PREFIX.]type`: the keyword `kerberos`.
+
+*   `[PREFIX.]kerberos.principal`: The web-application Kerberos principal
+    name. The Kerberos principal name must start with `HTTP/...`. For
+    example: `HTTP/localhost@LOCALHOST`. There is no default value.
+
+*   `[PREFIX.]kerberos.keytab`: The path to the keytab file containing
+    the credentials for the kerberos principal. For example:
+    `/Users/tucu/tucu.keytab`. There is no default value.
+
+**Example**:
+
+    <web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+        ...
+
+        <filter>
+            <filter-name>kerberosFilter</filter-name>
+            
<filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
+            <init-param>
+                <param-name>type</param-name>
+                <param-value>kerberos</param-value>
+            </init-param>
+            <init-param>
+                <param-name>token.validity</param-name>
+                <param-value>30</param-value>
+            </init-param>
+            <init-param>
+                <param-name>cookie.domain</param-name>
+                <param-value>.foo.com</param-value>
+            </init-param>
+            <init-param>
+                <param-name>cookie.path</param-name>
+                <param-value>/</param-value>
+            </init-param>
+            <init-param>
+                <param-name>kerberos.principal</param-name>
+                <param-value>HTTP/localhost@LOCALHOST</param-value>
+            </init-param>
+            <init-param>
+                <param-name>kerberos.keytab</param-name>
+                <param-value>/tmp/auth.keytab</param-value>
+            </init-param>
+        </filter>
+
+        <filter-mapping>
+            <filter-name>kerberosFilter</filter-name>
+            <url-pattern>/kerberos/*</url-pattern>
+        </filter-mapping>
+
+        ...
+    </web-app>
+
+### Pseudo/Simple Configuration
+
+To use Pseudo/Simple as the authentication mechanism (trusting the value of 
the query string parameter 'user.name'), the authentication filter must be 
configured with the following init parameters:
+
+*   `[PREFIX.]type`: the keyword `simple`.
+
+*   `[PREFIX.]simple.anonymous.allowed`: is a boolean parameter that
+    indicates if anonymous requests are allowed or not. The default value is
+    `false`.
+
+**Example**:
+
+    <web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+        ...
+
+        <filter>
+            <filter-name>simpleFilter</filter-name>
+            
<filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
+            <init-param>
+                <param-name>type</param-name>
+                <param-value>simple</param-value>
+            </init-param>
+            <init-param>
+                <param-name>token.validity</param-name>
+                <param-value>30</param-value>
+            </init-param>
+            <init-param>
+                <param-name>cookie.domain</param-name>
+                <param-value>.foo.com</param-value>
+            </init-param>
+            <init-param>
+                <param-name>cookie.path</param-name>
+                <param-value>/</param-value>
+            </init-param>
+            <init-param>
+                <param-name>simple.anonymous.allowed</param-name>
+                <param-value>false</param-value>
+            </init-param>
+        </filter>
+
+        <filter-mapping>
+            <filter-name>simpleFilter</filter-name>
+            <url-pattern>/simple/*</url-pattern>
+        </filter-mapping>
+
+        ...
+    </web-app>
+
+### AltKerberos Configuration
+
+**IMPORTANT**: A KDC must be configured and running.
+
+The AltKerberos authentication mechanism is a partially implemented derivative 
of the Kerberos SPNEGO authentication mechanism which allows a "mixed" form of 
authentication where Kerberos SPNEGO is used by non-browsers while an alternate 
form of authentication (to be implemented by the user) is used for browsers. To 
use AltKerberos as the authentication mechanism (besides providing an 
implementation), the authentication filter must be configured with the 
following init parameters, in addition to the previously mentioned Kerberos 
SPNEGO ones:
+
+*   `[PREFIX.]type`: the full class name of the implementation of
+    AltKerberosAuthenticationHandler to use.
+
+*   `[PREFIX.]alt-kerberos.non-browser.user-agents`: a comma-separated
+    list of which user-agents should be considered non-browsers.
+
+**Example**:
+
+    <web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+        ...
+
+        <filter>
+            <filter-name>kerberosFilter</filter-name>
+            
<filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
+            <init-param>
+                <param-name>type</param-name>
+                
<param-value>org.my.subclass.of.AltKerberosAuthenticationHandler</param-value>
+            </init-param>
+            <init-param>
+                <param-name>alt-kerberos.non-browser.user-agents</param-name>
+                <param-value>java,curl,wget,perl</param-value>
+            </init-param>
+            <init-param>
+                <param-name>token.validity</param-name>
+                <param-value>30</param-value>
+            </init-param>
+            <init-param>
+                <param-name>cookie.domain</param-name>
+                <param-value>.foo.com</param-value>
+            </init-param>
+            <init-param>
+                <param-name>cookie.path</param-name>
+                <param-value>/</param-value>
+            </init-param>
+            <init-param>
+                <param-name>kerberos.principal</param-name>
+                <param-value>HTTP/localhost@LOCALHOST</param-value>
+            </init-param>
+            <init-param>
+                <param-name>kerberos.keytab</param-name>
+                <param-value>/tmp/auth.keytab</param-value>
+            </init-param>
+        </filter>
+
+        <filter-mapping>
+            <filter-name>kerberosFilter</filter-name>
+            <url-pattern>/kerberos/*</url-pattern>
+        </filter-mapping>
+
+        ...
+    </web-app>
+
+### SignerSecretProvider Configuration
+
+The SignerSecretProvider is used to provide more advanced behaviors for the 
secret used for signing the HTTP Cookies.
+
+These are the relevant configuration properties:
+
+*   `signer.secret.provider`: indicates the name of the
+    SignerSecretProvider class to use. Possible values are: "string",
+    "random", "zookeeper", or a classname. If not specified, the "string"
+    implementation will be used; and failing that, the "random" implementation
+    will be used.
+
+*   `[PREFIX.]signature.secret`: When `signer.secret.provider` is set
+    to `string` or not specified, this is the value for the secret used to
+    sign the HTTP cookie.
+
+*   `[PREFIX.]token.validity`: The validity -in seconds- of the generated
+    authentication token. The default value is `3600` seconds. This is
+    also used for the rollover interval when `signer.secret.provider` is
+    set to `random` or `zookeeper`.
+
+The following configuration properties are specific to the `zookeeper` 
implementation:
+
+*   `signer.secret.provider.zookeeper.connection.string`: Indicates the
+    ZooKeeper connection string to connect with.
+
+*   `signer.secret.provider.zookeeper.path`: Indicates the ZooKeeper path
+    to use for storing and retrieving the secrets. All servers
+    that need to coordinate their secret should point to the same path
+
+*   `signer.secret.provider.zookeeper.auth.type`: Indicates the auth type
+    to use. Supported values are `none` and `sasl`. The default
+    value is `none`.
+
+*   `signer.secret.provider.zookeeper.kerberos.keytab`: Set this to the
+    path with the Kerberos keytab file. This is only required if using
+    Kerberos.
+
+*   `signer.secret.provider.zookeeper.kerberos.principal`: Set this to the
+    Kerberos principal to use. This only required if using Kerberos.
+
+**Example**:
+
+    <web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+        ...
+
+        <filter>
+            <!-- AuthenticationHandler configs not shown -->
+            <init-param>
+                <param-name>signer.secret.provider</param-name>
+                <param-value>string</param-value>
+            </init-param>
+            <init-param>
+                <param-name>signature.secret</param-name>
+                <param-value>my_secret</param-value>
+            </init-param>
+        </filter>
+
+        ...
+    </web-app>
+
+**Example**:
+
+    <web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+        ...
+
+        <filter>
+            <!-- AuthenticationHandler configs not shown -->
+            <init-param>
+                <param-name>signer.secret.provider</param-name>
+                <param-value>random</param-value>
+            </init-param>
+            <init-param>
+                <param-name>token.validity</param-name>
+                <param-value>30</param-value>
+            </init-param>
+        </filter>
+
+        ...
+    </web-app>
+
+**Example**:
+
+    <web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+        ...
+
+        <filter>
+            <!-- AuthenticationHandler configs not shown -->
+            <init-param>
+                <param-name>signer.secret.provider</param-name>
+                <param-value>zookeeper</param-value>
+            </init-param>
+            <init-param>
+                <param-name>token.validity</param-name>
+                <param-value>30</param-value>
+            </init-param>
+            <init-param>
+                
<param-name>signer.secret.provider.zookeeper.connection.string</param-name>
+                <param-value>zoo1:2181,zoo2:2181,zoo3:2181</param-value>
+            </init-param>
+            <init-param>
+                <param-name>signer.secret.provider.zookeeper.path</param-name>
+                <param-value>/myapp/secrets</param-value>
+            </init-param>
+            <init-param>
+                
<param-name>signer.secret.provider.zookeeper.use.kerberos.acls</param-name>
+                <param-value>true</param-value>
+            </init-param>
+            <init-param>
+                
<param-name>signer.secret.provider.zookeeper.kerberos.keytab</param-name>
+                <param-value>/tmp/auth.keytab</param-value>
+            </init-param>
+            <init-param>
+                
<param-name>signer.secret.provider.zookeeper.kerberos.principal</param-name>
+                <param-value>HTTP/localhost@LOCALHOST</param-value>
+            </init-param>
+        </filter>
+
+        ...
+    </web-app>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/markdown/Examples.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/site/markdown/Examples.md 
b/hadoop-common-project/hadoop-auth/src/site/markdown/Examples.md
new file mode 100644
index 0000000..7efb642
--- /dev/null
+++ b/hadoop-common-project/hadoop-auth/src/site/markdown/Examples.md
@@ -0,0 +1,109 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+Hadoop Auth, Java HTTP SPNEGO - Examples
+========================================
+
+Accessing a Hadoop Auth protected URL Using a browser
+-----------------------------------------------------
+
+**IMPORTANT:** The browser must support HTTP Kerberos SPNEGO. For example, 
Firefox or Internet Explorer.
+
+For Firefox access the low level configuration page by loading the 
`about:config` page. Then go to the `network.negotiate-auth.trusted-uris` 
preference and add the hostname or the domain of the web server that is HTTP 
Kerberos SPNEGO protected (if using multiple domains and hostname use comma to 
separate them).
+
+Accessing a Hadoop Auth protected URL Using `curl`
+--------------------------------------------------
+
+**IMPORTANT:** The `curl` version must support GSS, run `curl -V`.
+
+    $ curl -V
+    curl 7.19.7 (universal-apple-darwin10.0) libcurl/7.19.7 OpenSSL/0.9.8l 
zlib/1.2.3
+    Protocols: tftp ftp telnet dict ldap http file https ftps
+    Features: GSS-Negotiate IPv6 Largefile NTLM SSL libz
+
+Login to the KDC using **kinit** and then use `curl` to fetch protected URL:
+
+    $ kinit
+    Please enter the password for tucu@LOCALHOST:
+    $ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt 
http://localhost:8080/hadoop-auth-examples/kerberos/who
+    Enter host password for user 'tucu':
+
+    Hello Hadoop Auth Examples!
+
+*   The `--negotiate` option enables SPNEGO in `curl`.
+
+*   The `-u foo` option is required but the user is ignored (the principal
+    that has been kinit-ed is used).
+
+*   The `-b` and `-c` options are used to store and send HTTP Cookies.
+
+Using the Java Client
+---------------------
+
+Use the `AuthenticatedURL` class to obtain an authenticated HTTP connection:
+
+    ...
+    URL url = new URL("http://localhost:8080/hadoop-auth/kerberos/who");
+    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+    ...
+    HttpURLConnection conn = new AuthenticatedURL(url, token).openConnection();
+    ...
+    conn = new AuthenticatedURL(url, token).openConnection();
+    ...
+
+Building and Running the Examples
+---------------------------------
+
+Download Hadoop-Auth's source code, the examples are in the 
`src/main/examples` directory.
+
+### Server Example:
+
+Edit the `hadoop-auth-examples/src/main/webapp/WEB-INF/web.xml` and set the 
right configuration init parameters for the `AuthenticationFilter` definition 
configured for Kerberos (the right Kerberos principal and keytab file must be 
specified). Refer to the [Configuration document](./Configuration.html) for 
details.
+
+Create the web application WAR file by running the `mvn package` command.
+
+Deploy the WAR file in a servlet container. For example, if using Tomcat, copy 
the WAR file to Tomcat's `webapps/` directory.
+
+Start the servlet container.
+
+### Accessing the server using `curl`
+
+Try accessing protected resources using `curl`. The protected resources are:
+
+    $ kinit
+    Please enter the password for tucu@LOCALHOST:
+
+    $ curl http://localhost:8080/hadoop-auth-examples/anonymous/who
+
+    $ curl http://localhost:8080/hadoop-auth-examples/simple/who?user.name=foo
+
+    $ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt 
http://localhost:8080/hadoop-auth-examples/kerberos/who
+
+### Accessing the server using the Java client example
+
+    $ kinit
+    Please enter the password for tucu@LOCALHOST:
+
+    $ cd examples
+
+    $ mvn exec:java 
-Durl=http://localhost:8080/hadoop-auth-examples/kerberos/who
+
+    ....
+
+    Token value: 
"u=tucu,p=tucu@LOCALHOST,t=kerberos,e=1295305313146,s=sVZ1mpSnC5TKhZQE3QLN5p2DWBo="
+    Status code: 200 OK
+
+    You are: user[tucu] principal[tucu@LOCALHOST]
+
+    ....

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-auth/src/site/markdown/index.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/site/markdown/index.md 
b/hadoop-common-project/hadoop-auth/src/site/markdown/index.md
new file mode 100644
index 0000000..8573b18
--- /dev/null
+++ b/hadoop-common-project/hadoop-auth/src/site/markdown/index.md
@@ -0,0 +1,43 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+Hadoop Auth, Java HTTP SPNEGO
+=============================
+
+Hadoop Auth is a Java library consisting of client and server components 
to enable Kerberos SPNEGO authentication for HTTP.
+
+Hadoop Auth also supports additional authentication mechanisms on the client 
and the server side via 2 simple interfaces.
+
+Additionally, it provides a partially implemented derivative of the Kerberos 
SPNEGO authentication to allow a "mixed" form of authentication where Kerberos 
SPNEGO is used by non-browsers while an alternate form of authentication (to be 
implemented by the user) is used for browsers.
+
+License
+-------
+
+Hadoop Auth is distributed under [Apache License 
2.0](http://www.apache.org/licenses/).
+
+How Does Auth Work?
+-------------------
+
+Hadoop Auth enforces authentication on protected resources; once authentication 
has been established it sets a signed HTTP Cookie that contains an 
authentication token with the user name, user principal, authentication type 
and expiration time.
+
+Subsequent HTTP client requests presenting the signed HTTP Cookie have access 
to the protected resources until the HTTP Cookie expires.
+
+The secret used to sign the HTTP Cookie has multiple implementations that 
provide different behaviors, including a hardcoded secret string, a rolling 
randomly generated secret, and a rolling randomly generated secret synchronized 
between multiple servers using ZooKeeper.
+
+User Documentation
+------------------
+
+* [Examples](./Examples.html)
+* [Configuration](./Configuration.html)
+* [Building It](./BuildingIt.html)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index d8a85f7..b3b2c95 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -181,6 +181,9 @@ Trunk (Unreleased)
     HADOOP-11596. Allow smart-apply-patch.sh to add new files in binary git
     patches (raviprak)
 
+    HADOOP-11593. Convert site documentation from apt to markdown (stragglers)
+    (Masatake Iwasaki via aw)
+
   BUG FIXES
 
     HADOOP-11473. test-patch says "-1 overall" even when all checks are +1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6fc1f3e/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm 
b/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
deleted file mode 100644
index a2dcce3..0000000
--- a/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
+++ /dev/null
@@ -1,1020 +0,0 @@
-~~ Licensed under the Apache License, Version 2.0 (the "License");
-~~ you may not use this file except in compliance with the License.
-~~ You may obtain a copy of the License at
-~~
-~~ http://www.apache.org/licenses/LICENSE-2.0
-~~
-~~ Unless required by applicable law or agreed to in writing, software
-~~ distributed under the License is distributed on an "AS IS" BASIS,
-~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-~~ See the License for the specific language governing permissions and
-~~ limitations under the License.
-
-  ---
-  Hadoop KMS - Documentation Sets ${project.version}
-  ---
-  ---
-  ${maven.build.timestamp}
-
-Hadoop Key Management Server (KMS) - Documentation Sets ${project.version}
-
-  Hadoop KMS is a cryptographic key management server based on Hadoop's
-  <<KeyProvider>> API.
-
-  It provides a client and a server components which communicate over
-  HTTP using a REST API.
-
-  The client is a KeyProvider implementation interacts with the KMS
-  using the KMS HTTP REST API.
-
-  KMS and its client have built-in security and they support HTTP SPNEGO
-  Kerberos authentication and HTTPS secure transport.
-
-  KMS is a Java web-application and it runs using a pre-configured Tomcat
-  bundled with the Hadoop distribution.
-
-* KMS Client Configuration
-
-  The KMS client <<<KeyProvider>>> uses the <<kms>> scheme, and the embedded
-  URL must be the URL of the KMS. For example, for a KMS running
-  on <<<http://localhost:16000/kms>>>, the KeyProvider URI is
-  <<<kms://http@localhost:16000/kms>>>. And, for a KMS running on
-  <<<https://localhost:16000/kms>>>, the KeyProvider URI is
-  <<<kms://https@localhost:16000/kms>>>
-
-* KMS
-
-** KMS Configuration
-
-  Configure the KMS backing KeyProvider properties
-  in the <<<etc/hadoop/kms-site.xml>>> configuration file:
-
-+---+
-  <property>
-    <name>hadoop.kms.key.provider.uri</name>
-    <value>jceks://file@/${user.home}/kms.keystore</value>
-  </property>
-
-  <property>
-    <name>hadoop.security.keystore.java-keystore-provider.password-file</name>
-    <value>kms.keystore.password</value>
-  </property>
-+---+
-
-  The password file is looked up in the Hadoop's configuration directory via 
the
-  classpath.
-
-  NOTE: You need to restart the KMS for the configuration changes to take
-  effect.
-
-** KMS Cache
-
-  KMS caches keys for short period of time to avoid excessive hits to the
-  underlying key provider.
-
-  The Cache is enabled by default (can be dissabled by setting the
-  <<<hadoop.kms.cache.enable>>> boolean property to false)
-
-  The cache is used with the following 3 methods only, <<<getCurrentKey()>>>
-  and <<<getKeyVersion()>>> and <<<getMetadata()>>>.
-
-  For the <<<getCurrentKey()>>> method, cached entries are kept for a maximum
-  of 30000 millisecond regardless the number of times the key is being access
-  (to avoid stale keys to be considered current).
-
-  For the <<<getKeyVersion()>>> method, cached entries are kept with a default
-  inactivity timeout of 600000 milliseconds (10 mins). This time out is
-  configurable via the following property in the <<<etc/hadoop/kms-site.xml>>>
-  configuration file:
-
-+---+
-  <property>
-    <name>hadoop.kms.cache.enable</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>hadoop.kms.cache.timeout.ms</name>
-    <value>600000</value>
-  </property>
-
-  <property>
-    <name>hadoop.kms.current.key.cache.timeout.ms</name>
-    <value>30000</value>
-  </property>
-+---+
-
-** KMS Aggregated Audit logs
-
-  Audit logs are aggregated for API accesses to the GET_KEY_VERSION,
-  GET_CURRENT_KEY, DECRYPT_EEK, GENERATE_EEK operations.
-
-  Entries are grouped by the (user,key,operation) combined key for a
-  configurable aggregation interval after which the number of accesses to the
-  specified end-point by the user for a given key is flushed to the audit log.
-
-  The Aggregation interval is configured via the property :
-
-+---+
-  <property>
-    <name>hadoop.kms.aggregation.delay.ms</name>
-    <value>10000</value>
-  </property>
-+---+
- 
-
-** Start/Stop the KMS
-
-  To start/stop KMS use KMS's bin/kms.sh script. For example:
-
-+---+
-hadoop-${project.version} $ sbin/kms.sh start
-+---+
-
-  NOTE: Invoking the script without any parameters list all possible
-  parameters (start, stop, run, etc.). The <<<kms.sh>>> script is a wrapper
-  for Tomcat's <<<catalina.sh>>> script that sets the environment variables
-  and Java System properties required to run KMS.
-
-** Embedded Tomcat Configuration
-
-  To configure the embedded Tomcat go to the 
<<<share/hadoop/kms/tomcat/conf>>>.
-
-  KMS pre-configures the HTTP and Admin ports in Tomcat's <<<server.xml>>> to
-  16000 and 16001.
-
-  Tomcat logs are also preconfigured to go to Hadoop's <<<logs/>>> directory.
-
-  The following environment variables (which can be set in KMS's
-  <<<etc/hadoop/kms-env.sh>>> script) can be used to alter those values:
-
-  * KMS_HTTP_PORT
-
-  * KMS_ADMIN_PORT
-
-  * KMS_MAX_THREADS
-
-  * KMS_LOG
-
-  NOTE: You need to restart the KMS for the configuration changes to take
-  effect.
-
-** Loading native libraries
-
-  The following environment variable (which can be set in KMS's
-  <<<etc/hadoop/kms-env.sh>>> script) can be used to specify the location
-  of any required native libraries. For eg. Tomact native Apache Portable
-  Runtime (APR) libraries:
-
-  * JAVA_LIBRARY_PATH
-
-** KMS Security Configuration
-
-*** Enabling Kerberos HTTP SPNEGO Authentication
-
-  Configure the Kerberos <<<etc/krb5.conf>>> file with the information of your
-  KDC server.
-
-  Create a service principal and its keytab for the KMS, it must be an
-  <<<HTTP>>> service principal.
-
-  Configure KMS <<<etc/hadoop/kms-site.xml>>> with the correct security values,
-  for example:
-
-+---+
-  <property>
-    <name>hadoop.kms.authentication.type</name>
-    <value>kerberos</value>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.kerberos.keytab</name>
-    <value>${user.home}/kms.keytab</value>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.kerberos.principal</name>
-    <value>HTTP/localhost</value>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.kerberos.name.rules</name>
-    <value>DEFAULT</value>
-  </property>
-+---+
-
-  NOTE: You need to restart the KMS for the configuration changes to take
-  effect.
-
-*** KMS Proxyuser Configuration
-
-  Each proxyuser must be configured in <<<etc/hadoop/kms-site.xml>>> using the
-  following properties:
-
-+---+
-  <property>
-    <name>hadoop.kms.proxyuser.#USER#.users</name>
-    <value>*</value>
-  </property>
-
-  <property>
-    <name>hadoop.kms.proxyuser.#USER#.groups</name>
-    <value>*</value>
-  </property>
-
-  <property>
-    <name>hadoop.kms.proxyuser.#USER#.hosts</name>
-    <value>*</value>
-  </property>
-+---+
-
-  <<<#USER#>>> is the username of the proxyuser to configure.
-
-  The <<<users>>> property indicates the users that can be impersonated.
-
-  The <<<groups>>> property indicates the groups users being impersonated must
-  belong to.
-
-  At least one of the <<<users>>> or <<<groups>>> properties must be defined.
-  If both are specified, then the configured proxyuser will be able to 
-  impersonate and user in the <<<users>>> list and any user belonging to one 
of 
-  the groups in the <<<groups>>> list.
-
-  The <<<hosts>>> property indicates from which host the proxyuser can make
-  impersonation requests.
-
-  If <<<users>>>, <<<groups>>> or <<<hosts>>> has a <<<*>>>, it means there are
-  no restrictions for the proxyuser regarding users, groups or hosts.
-  
-*** KMS over HTTPS (SSL)
-
-  To configure KMS to work over HTTPS the following 2 properties must be
-  set in the <<<etc/hadoop/kms_env.sh>>> script (shown with default values):
-
-    * KMS_SSL_KEYSTORE_FILE=${HOME}/.keystore
-
-    * KMS_SSL_KEYSTORE_PASS=password
-
-  In the KMS <<<tomcat/conf>>> directory, replace the <<<server.xml>>> file
-  with the provided <<<ssl-server.xml>>> file.
-
-  You need to create an SSL certificate for the KMS. As the
-  <<<kms>>> Unix user, using the Java <<<keytool>>> command to create the
-  SSL certificate:
-
-+---+
-$ keytool -genkey -alias tomcat -keyalg RSA
-+---+
-
-  You will be asked a series of questions in an interactive prompt.  It will
-  create the keystore file, which will be named <<.keystore>> and located in 
the
-  <<<kms>>> user home directory.
-
-  The password you enter for "keystore password" must match the  value of the
-  <<<KMS_SSL_KEYSTORE_PASS>>> environment variable set in the
-  <<<kms-env.sh>>> script in the configuration directory.
-
-  The answer to "What is your first and last name?" (i.e. "CN") must be the
-  hostname of the machine where the KMS will be running.
-
-  NOTE: You need to restart the KMS for the configuration changes to take
-  effect.
-
-*** KMS Access Control
-
-  KMS ACLs configuration are defined in the KMS <<<etc/hadoop/kms-acls.xml>>>
-  configuration file. This file is hot-reloaded when it changes.
-
-  KMS supports both fine grained access control as well as blacklist for kms
-  operations via a set ACL configuration properties.
-
-  A user accessing KMS is first checked for inclusion in the Access Control
-  List for the requested operation and then checked for exclusion in the
-  Black list for the operation before access is granted.
-
-
-+---+
-  <property>
-    <name>hadoop.kms.acl.CREATE</name>
-    <value>*</value>
-    <description>
-      ACL for create-key operations.
-      If the user is not in the GET ACL, the key material is not returned
-      as part of the response.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.CREATE</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for create-key operations.
-      If the user is in the Blacklist, the key material is not returned
-      as part of the response.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.DELETE</name>
-    <value>*</value>
-    <description>
-      ACL for delete-key operations.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.DELETE</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for delete-key operations.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.ROLLOVER</name>
-    <value>*</value>
-    <description>
-      ACL for rollover-key operations.
-      If the user is not in the GET ACL, the key material is not returned
-      as part of the response.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.ROLLOVER</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for rollover-key operations.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.GET</name>
-    <value>*</value>
-    <description>
-      ACL for get-key-version and get-current-key operations.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.GET</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for get-key-version and get-current-key operations.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.GET_KEYS</name>
-    <value>*</value>
-    <description>
-      ACL for get-keys operation.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.GET_KEYS</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for get-keys operation.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.GET_METADATA</name>
-    <value>*</value>
-    <description>
-      ACL for get-key-metadata and get-keys-metadata operations.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.GET_METADATA</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for get-key-metadata and get-keys-metadata operations.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.SET_KEY_MATERIAL</name>
-    <value>*</value>
-    <description>
-        Complementary ACL for CREATE and ROLLOVER operations to allow the client
-        to provide the key material when creating or rolling a key.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.SET_KEY_MATERIAL</name>
-    <value>hdfs,foo</value>
-    <description>
-        Complementary Blacklist for CREATE and ROLLOVER operations to allow the 
client
-        to provide the key material when creating or rolling a key.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.GENERATE_EEK</name>
-    <value>*</value>
-    <description>
-      ACL for generateEncryptedKey
-      CryptoExtension operations
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.blacklist.GENERATE_EEK</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for generateEncryptedKey
-      CryptoExtension operations
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.acl.DECRYPT_EEK</name>
-    <value>*</value>
-    <description>
-      ACL for decrypt EncryptedKey
-      CryptoExtension operations
-    </description>
-  </property>
-</configuration>
-
-  <property>
-    <name>hadoop.kms.blacklist.DECRYPT_EEK</name>
-    <value>hdfs,foo</value>
-    <description>
-      Blacklist for decrypt EncryptedKey
-      CryptoExtension operations
-    </description>
-  </property>
-</configuration>
-
-+---+
-
-*** Key Access Control
-
-  KMS supports access control for all non-read operations at the Key level.
-  All Key Access operations are classified as :
-
-    * MANAGEMENT - createKey, deleteKey, rolloverNewVersion
-
-    * GENERATE_EEK - generateEncryptedKey, warmUpEncryptedKeys
-
-    * DECRYPT_EEK - decryptEncryptedKey
-
-    * READ - getKeyVersion, getKeyVersions, getMetadata, getKeysMetadata,
-             getCurrentKey
-
-    * ALL - all of the above
-
-  These can be defined in the KMS <<<etc/hadoop/kms-acls.xml>>> as follows
-
-  For all keys for which key access has not been explicitly configured, it
-  is possible to configure a default key access control for a subset of the
-  operation types.
-
-  It is also possible to configure a "whitelist" key ACL for a subset of the
-  operation types. The whitelist key ACL is a whitelist in addition to the
-  explicit or default per-key ACL. That is, if no per-key ACL is explicitly
-  set, a user will be granted access if they are present in the default per-key
-  ACL or the whitelist key ACL. If a per-key ACL is explicitly set, a user
-  will be granted access if they are present in the per-key ACL or the
-  whitelist key ACL.
-
-  If no ACL is configured for a specific key AND no default ACL is configured
-  AND no root key ACL is configured for the requested operation,
-  then access will be DENIED.
-  
-  <<NOTE:>> The default and whitelist key ACL does not support <<<ALL>>>
-            operation qualifier.
-  
-+---+
-  <property>
-    <name>key.acl.testKey1.MANAGEMENT</name>
-    <value>*</value>
-    <description>
-      ACL for create-key, deleteKey and rolloverNewVersion operations.
-    </description>
-  </property>
-
-  <property>
-    <name>key.acl.testKey2.GENERATE_EEK</name>
-    <value>*</value>
-    <description>
-      ACL for generateEncryptedKey operations.
-    </description>
-  </property>
-
-  <property>
-    <name>key.acl.testKey3.DECRYPT_EEK</name>
-    <value>admink3</value>
-    <description>
-      ACL for decryptEncryptedKey operations.
-    </description>
-  </property>
-
-  <property>
-    <name>key.acl.testKey4.READ</name>
-    <value>*</value>
-    <description>
-      ACL for getKeyVersion, getKeyVersions, getMetadata, getKeysMetadata,
-      getCurrentKey operations
-    </description>
-  </property>
-
-  <property>
-    <name>key.acl.testKey5.ALL</name>
-    <value>*</value>
-    <description>
-      ACL for ALL operations.
-    </description>
-  </property>
-
-  <property>
-    <name>whitelist.key.acl.MANAGEMENT</name>
-    <value>admin1</value>
-    <description>
-      whitelist ACL for MANAGEMENT operations for all keys.
-    </description>
-  </property>
-
-  <!--
-  'testKey3' key ACL is defined. Since a 'whitelist'
-  key is also defined for DECRYPT_EEK, in addition to
-  admink3, admin1 can also perform DECRYPT_EEK operations
-  on 'testKey3'
-  -->
-  <property>
-    <name>whitelist.key.acl.DECRYPT_EEK</name>
-    <value>admin1</value>
-    <description>
-      whitelist ACL for DECRYPT_EEK operations for all keys.
-    </description>
-  </property>
-
-  <property>
-    <name>default.key.acl.MANAGEMENT</name>
-    <value>user1,user2</value>
-    <description>
-      default ACL for MANAGEMENT operations for all keys that are not
-      explicitly defined.
-    </description>
-  </property>
-
-  <property>
-    <name>default.key.acl.GENERATE_EEK</name>
-    <value>user1,user2</value>
-    <description>
-      default ACL for GENERATE_EEK operations for all keys that are not
-      explicitly defined.
-    </description>
-  </property>
-
-  <property>
-    <name>default.key.acl.DECRYPT_EEK</name>
-    <value>user1,user2</value>
-    <description>
-      default ACL for DECRYPT_EEK operations for all keys that are not
-      explicitly defined.
-    </description>
-  </property>
-
-  <property>
-    <name>default.key.acl.READ</name>
-    <value>user1,user2</value>
-    <description>
-      default ACL for READ operations for all keys that are not
-      explicitly defined.
-    </description>
-  </property>
-+---+
-
-** KMS Delegation Token Configuration
-
-  KMS delegation token secret manager can be configured with the following
-  properties:
-
-+---+
-  <property>
-    <name>hadoop.kms.authentication.delegation-token.update-interval.sec</name>
-    <value>86400</value>
-    <description>
-      How often the master key is rotated, in seconds. Default value 1 day.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.delegation-token.max-lifetime.sec</name>
-    <value>604800</value>
-    <description>
-      Maximum lifetime of a delegation token, in seconds. Default value 7 days.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.delegation-token.renew-interval.sec</name>
-    <value>86400</value>
-    <description>
-      Renewal interval of a delegation token, in seconds. Default value 1 day.
-    </description>
-  </property>
-
-  <property>
-    
<name>hadoop.kms.authentication.delegation-token.removal-scan-interval.sec</name>
-    <value>3600</value>
-    <description>
-      Scan interval to remove expired delegation tokens.
-    </description>
-  </property>
-+---+
-
-
-** Using Multiple Instances of KMS Behind a Load-Balancer or VIP
-
-  KMS supports multiple KMS instances behind a load-balancer or VIP for
-  scalability and for HA purposes.
-
-  When using multiple KMS instances behind a load-balancer or VIP, requests 
from
-  the same user may be handled by different KMS instances.
-
-  KMS instances behind a load-balancer or VIP must be specially configured to
-  work properly as a single logical service.
-
-*** HTTP Kerberos Principals Configuration
-
-  When KMS instances are behind a load-balancer or VIP, clients will use the
-  hostname of the VIP. For Kerberos SPNEGO authentication, the hostname of the
-  URL is used to construct the Kerberos service name of the server,
-  <<<HTTP/#HOSTNAME#>>>. This means that all KMS instances must have a Kerberos
-  service name with the load-balancer or VIP hostname.
-
-  In order to be able to access directly a specific KMS instance, the KMS
-  instance must also have a Kerberos service name with its own hostname. This is
-  required for monitoring and admin purposes.
-
-  Both Kerberos service principal credentials (for the load-balancer/VIP
-  hostname and for the actual KMS instance hostname) must be in the keytab file
-  configured for authentication. And the principal name specified in the
-  configuration must be '*'. For example:
-
-+---+
-  <property>
-    <name>hadoop.kms.authentication.kerberos.principal</name>
-    <value>*</value>
-  </property>
-+---+
-
-  <<NOTE:>> If using HTTPS, the SSL certificate used by the KMS instance must
-  be configured to support multiple hostnames (see Java 7
-  <<<keytool>>> SAN extension support for details on how to do this).
-
-*** HTTP Authentication Signature
-
-  KMS uses Hadoop Authentication for HTTP authentication. Hadoop Authentication
-  issues a signed HTTP Cookie once the client has authenticated successfully.
-  This HTTP Cookie has an expiration time, after which it will trigger a new
-  authentication sequence. This is done to avoid triggering the authentication
-  on every HTTP request of a client.
-
-  A KMS instance must verify the HTTP Cookie signatures signed by other KMS
-  instances. To do this all KMS instances must share the signing secret.
-
-  This secret sharing can be done using a Zookeeper service which is configured
-  in KMS with the following properties in the <<<kms-site.xml>>>:
-
-+---+
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider</name>
-    <value>zookeeper</value>
-    <description>
-      Indicates how the secret to sign the authentication cookies will be
-      stored. Options are 'random' (default), 'string' and 'zookeeper'.
-      If using a setup with multiple KMS instances, 'zookeeper' should be used.
-    </description>
-  </property>
-  <property>
-    
<name>hadoop.kms.authentication.signer.secret.provider.zookeeper.path</name>
-    <value>/hadoop-kms/hadoop-auth-signature-secret</value>
-    <description>
-      The Zookeeper ZNode path where the KMS instances will store and retrieve
-      the secret from.
-    </description>
-  </property>
-  <property>
-    
<name>hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string</name>
-    <value>#HOSTNAME#:#PORT#,...</value>
-    <description>
-      The Zookeeper connection string, a list of hostnames and port comma
-      separated.
-    </description>
-  </property>
-  <property>
-    
<name>hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type</name>
-    <value>kerberos</value>
-    <description>
-      The Zookeeper authentication type, 'none' or 'sasl' (Kerberos).
-    </description>
-  </property>
-  <property>
-    
<name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab</name>
-    <value>/etc/hadoop/conf/kms.keytab</value>
-    <description>
-      The absolute path for the Kerberos keytab with the credentials to
-      connect to Zookeeper.
-    </description>
-  </property>
-  <property>
-    
<name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal</name>
-    <value>kms/#HOSTNAME#</value>
-    <description>
-      The Kerberos service principal used to connect to Zookeeper.
-    </description>
-  </property>
-+---+
-
-*** Delegation Tokens
-
-  TBD
-
-** KMS HTTP REST API
-
-*** Create a Key
-
-  <REQUEST:>
-
-+---+
-POST http://HOST:PORT/kms/v1/keys
-Content-Type: application/json
-
-{
-  "name"        : "<key-name>",
-  "cipher"      : "<cipher>",
-  "length"      : <length>,        //int
-  "material"    : "<material>",    //base64
-  "description" : "<description>"
-}
-+---+
-  
-  <RESPONSE:>
-  
-+---+
-201 CREATED
-LOCATION: http://HOST:PORT/kms/v1/key/<key-name>
-Content-Type: application/json
-
-{
-  "name"        : "versionName",
-  "material"    : "<material>",    //base64, not present without GET ACL
-}
-+---+
-
-*** Rollover Key
-
-  <REQUEST:>
-
-+---+
-POST http://HOST:PORT/kms/v1/key/<key-name>
-Content-Type: application/json
-
-{
-  "material"    : "<material>",
-}
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-{
-  "name"        : "versionName",
-  "material"    : "<material>",    //base64, not present without GET ACL
-}
-+---+
-
-*** Delete Key
-
-  <REQUEST:>
-
-+---+
-DELETE http://HOST:PORT/kms/v1/key/<key-name>
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-+---+
-
-*** Get Key Metadata
-
-  <REQUEST:>
-
-+---+
-GET http://HOST:PORT/kms/v1/key/<key-name>/_metadata
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-{
-  "name"        : "<key-name>",
-  "cipher"      : "<cipher>",
-  "length"      : <length>,        //int
-  "description" : "<description>",
-  "created"     : <millis-epoc>,   //long
-  "versions"    : <versions>       //int
-}
-+---+
-
-*** Get Current Key
-
-  <REQUEST:>
-
-+---+
-GET http://HOST:PORT/kms/v1/key/<key-name>/_currentversion
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-{
-  "name"        : "versionName",
-  "material"    : "<material>",    //base64
-}
-+---+
-
-
-*** Generate Encrypted Key for Current KeyVersion
-
-  <REQUEST:>
-
-+---+
-GET 
http://HOST:PORT/kms/v1/key/<key-name>/_eek?eek_op=generate&num_keys=<number-of-keys-to-generate>
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-[
-  {
-    "versionName"         : "encryptionVersionName",
-    "iv"                  : "<iv>",          //base64
-    "encryptedKeyVersion" : {
-        "versionName"       : "EEK",
-        "material"          : "<material>",    //base64
-    }
-  },
-  {
-    "versionName"         : "encryptionVersionName",
-    "iv"                  : "<iv>",          //base64
-    "encryptedKeyVersion" : {
-        "versionName"       : "EEK",
-        "material"          : "<material>",    //base64
-    }
-  },
-  ...
-]
-+---+
-
-*** Decrypt Encrypted Key
-
-  <REQUEST:>
-
-+---+
-POST http://HOST:PORT/kms/v1/keyversion/<version-name>/_eek?eek_op=decrypt
-Content-Type: application/json
-
-{
-  "name"        : "<key-name>",
-  "iv"          : "<iv>",          //base64
-  "material"    : "<material>",    //base64
-}
-
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-{
-  "name"        : "EK",
-  "material"    : "<material>",    //base64
-}
-+---+
-
-
-*** Get Key Version
-
-  <REQUEST:>
-
-+---+
-GET http://HOST:PORT/kms/v1/keyversion/<version-name>
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-{
-  "name"        : "versionName",
-  "material"    : "<material>",    //base64
-}
-+---+
-
-*** Get Key Versions
-
-  <REQUEST:>
-
-+---+
-GET http://HOST:PORT/kms/v1/key/<key-name>/_versions
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-[
-  {
-    "name"        : "versionName",
-    "material"    : "<material>",    //base64
-  },
-  {
-    "name"        : "versionName",
-    "material"    : "<material>",    //base64
-  },
-  ...
-]
-+---+
-
-*** Get Key Names
-
-  <REQUEST:>
-
-+---+
-GET http://HOST:PORT/kms/v1/keys/names
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-[
-  "<key-name>",
-  "<key-name>",
-  ...
-]
-+---+
-
-*** Get Keys Metadata
-
-+---+
-GET http://HOST:PORT/kms/v1/keys/metadata?key=<key-name>&key=<key-name>,...
-+---+
-
-  <RESPONSE:>
-
-+---+
-200 OK
-Content-Type: application/json
-
-[
-  {
-    "name"        : "<key-name>",
-    "cipher"      : "<cipher>",
-    "length"      : <length>,        //int
-    "description" : "<description>",
-    "created"     : <millis-epoc>,   //long
-    "versions"    : <versions>       //int
-  },
-  {
-    "name"        : "<key-name>",
-    "cipher"      : "<cipher>",
-    "length"      : <length>,        //int
-    "description" : "<description>",
-    "created"     : <millis-epoc>,   //long
-    "versions"    : <versions>       //int
-  },
-  ...
-]
-+---+

Reply via email to