This is an automated email from the ASF dual-hosted git repository.
xyao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 4a9fde5 HDDS-3282. ozone.http.filter.initializers can't be set properly for S… (#724)
4a9fde5 is described below
commit 4a9fde5816ce092c7f60e59f1229c1be1869ca31
Author: Xiaoyu Yao <[email protected]>
AuthorDate: Wed May 6 15:46:06 2020 -0700
HDDS-3282. ozone.http.filter.initializers can't be set properly for S… (#724)
---
.../org/apache/hadoop/hdds/HddsConfigKeys.java | 10 +-
.../org/apache/hadoop/ozone/OzoneConfigKeys.java | 9 +-
.../common/src/main/resources/ozone-default.xml | 87 ++++++++----
.../hadoop/ozone/HddsDatanodeHttpServer.java | 10 ++
hadoop-hdds/docs/content/security/SecureOzone.md | 4 +-
.../hadoop/hdds/server/http/BaseHttpServer.java | 76 +++++-----
.../hadoop/hdds/server/http/FilterContainer.java | 42 ------
.../hadoop/hdds/server/http/FilterInitializer.java | 33 -----
.../hadoop/hdds/server/http/HttpServer2.java | 22 ++-
.../hdds/server/http/StaticUserWebFilter.java | 157 ---------------------
.../hdds/server/http/TestBaseHttpServer.java | 10 ++
.../hdds/scm/server/SCMHTTPServerConfig.java | 14 +-
.../server/StorageContainerManagerHttpServer.java | 10 ++
.../org/apache/hadoop/ozone/om/OMConfigKeys.java | 9 +-
.../src/main/compose/ozonesecure-mr/docker-config | 15 +-
.../src/main/compose/ozonesecure/docker-config | 56 +++++---
.../hadoop/ozone/TestOzoneConfigurationFields.java | 4 +-
.../hadoop/ozone/om/OzoneManagerHttpServer.java | 15 +-
.../hadoop/ozone/recon/ConfigurationProvider.java | 13 +-
.../apache/hadoop/ozone/recon/ReconHttpServer.java | 10 ++
.../hadoop/ozone/recon/ReconServerConfigKeys.java | 15 +-
.../hadoop/ozone/s3/S3GatewayConfigKeys.java | 14 +-
.../hadoop/ozone/s3/S3GatewayHttpServer.java | 10 ++
.../apache/hadoop/ozone/freon/FreonHttpServer.java | 10 ++
24 files changed, 303 insertions(+), 352 deletions(-)
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
index d3cf862..9c77a1e 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
@@ -245,16 +245,20 @@ public final class HddsConfigKeys {
public static final String HDDS_DATANODE_HTTP_BIND_HOST_DEFAULT = "0.0.0.0";
public static final int HDDS_DATANODE_HTTP_BIND_PORT_DEFAULT = 9882;
public static final int HDDS_DATANODE_HTTPS_BIND_PORT_DEFAULT = 9883;
+
+ public static final String OZONE_DATANODE_HTTP_AUTH_CONFIG_PREFIX =
+ "hdds.datanode.http.auth.";
+ public static final String HDDS_DATANODE_HTTP_AUTH_TYPE =
+ OZONE_DATANODE_HTTP_AUTH_CONFIG_PREFIX + "type";
public static final String
HDDS_DATANODE_HTTP_KERBEROS_PRINCIPAL_KEY =
- "hdds.datanode.http.kerberos.principal";
+ OZONE_DATANODE_HTTP_AUTH_CONFIG_PREFIX + "kerberos.principal";
public static final String
HDDS_DATANODE_HTTP_KERBEROS_KEYTAB_FILE_KEY =
- "hdds.datanode.http.kerberos.keytab";
+ OZONE_DATANODE_HTTP_AUTH_CONFIG_PREFIX + "kerberos.keytab";
public static final String HDDS_DATANODE_RATIS_SERVER_REQUEST_TIMEOUT =
"hdds.datanode.ratis.server.request.timeout";
public static final String
HDDS_DATANODE_RATIS_SERVER_REQUEST_TIMEOUT_DEFAULT = "2m";
-
}
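For reference, the prefix-composed constants above resolve to the following full key names; a minimal throwaway sketch (the Demo class and main() wrapper are illustrative only, not part of the patch):

    public final class DatanodeHttpAuthKeysDemo {
      public static void main(String[] args) {
        String prefix = "hdds.datanode.http.auth.";        // OZONE_DATANODE_HTTP_AUTH_CONFIG_PREFIX
        System.out.println(prefix + "type");               // hdds.datanode.http.auth.type
        System.out.println(prefix + "kerberos.principal"); // hdds.datanode.http.auth.kerberos.principal
        System.out.println(prefix + "kerberos.keytab");    // hdds.datanode.http.auth.kerberos.keytab
      }
    }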
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
index fe1c440..322d2b6 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
@@ -424,10 +424,15 @@ public final class OzoneConfigKeys {
public static final int OZONE_FREON_HTTPS_BIND_PORT_DEFAULT = 9885;
public static final String
OZONE_FREON_HTTP_KERBEROS_PRINCIPAL_KEY =
- "ozone.freon.http.kerberos.principal";
+ "ozone.freon.http.auth.kerberos.principal";
public static final String
OZONE_FREON_HTTP_KERBEROS_KEYTAB_FILE_KEY =
- "ozone.freon.http.kerberos.keytab";
+ "ozone.freon.http.auth.kerberos.keytab";
+ public static final String OZONE_FREON_HTTP_AUTH_TYPE =
+ "ozone.freon.http.auth.type";
+ public static final String OZONE_FREON_HTTP_AUTH_CONFIG_PREFIX =
+ "ozone.freon.http.auth.";
+
public static final String OZONE_NETWORK_TOPOLOGY_AWARE_READ_KEY =
"ozone.network.topology.aware.read";
diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml
index 722f9e4..5cf6619 100644
--- a/hadoop-hdds/common/src/main/resources/ozone-default.xml
+++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml
@@ -1435,16 +1435,6 @@
</property>
<property>
- <name>ozone.s3g.authentication.kerberos.principal</name>
- <value/>
- <tag>OZONE, S3GATEWAY</tag>
- <description>The server principal used by Ozone S3Gateway server. This is
- typically set to
- HTTP/[email protected] The SPNEGO server principal begins with the prefix
- HTTP/ by convention.</description>
- </property>
-
- <property>
<name>ozone.s3g.domain.name</name>
<value/>
<tag>OZONE, S3GATEWAY</tag>
@@ -1500,7 +1490,17 @@
</property>
<property>
- <name>ozone.s3g.keytab.file</name>
+ <name>ozone.s3g.http.auth.kerberos.principal</name>
+ <value/>
+ <tag>OZONE, S3GATEWAY</tag>
+ <description>The server principal used by Ozone S3Gateway server. This is
+ typically set to
+ HTTP/[email protected] The SPNEGO server principal begins with the prefix
+ HTTP/ by convention.</description>
+ </property>
+
+ <property>
+ <name>ozone.s3g.http.auth.kerberos.keytab</name>
<value/>
<tag>OZONE, S3GATEWAY</tag>
<description>The keytab file used by the S3Gateway server to login as its
@@ -1785,14 +1785,14 @@
<description>The OzoneManager service principal. Ex
om/[email protected]</description>
</property>
<property>
- <name>ozone.om.http.kerberos.principal</name>
+ <name>ozone.om.http.auth.kerberos.principal</name>
<value>HTTP/[email protected]</value>
<description>
OzoneManager http server kerberos principal.
</description>
</property>
<property>
- <name>ozone.om.http.kerberos.keytab</name>
+ <name>ozone.om.http.auth.kerberos.keytab</name>
<value>/etc/security/keytabs/HTTP.keytab</value>
<description>
OzoneManager http server kerberos keytab.
@@ -2024,7 +2024,7 @@
</description>
</property>
<property>
- <name>ozone.freon.http.kerberos.principal</name>
+ <name>ozone.freon.http.auth.kerberos.principal</name>
<value>HTTP/[email protected]</value>
<tag>SECURITY</tag>
<description>
@@ -2032,7 +2032,7 @@
</description>
</property>
<property>
- <name>ozone.freon.http.kerberos.keytab</name>
+ <name>ozone.freon.http.auth.kerberos.keytab</name>
<value>/etc/security/keytabs/HTTP.keytab</value>
<tag>SECURITY</tag>
<description>
@@ -2086,7 +2086,7 @@
</property>
<property>
- <name>hdds.datanode.http.kerberos.principal</name>
+ <name>hdds.datanode.http.auth.kerberos.principal</name>
<value>HTTP/[email protected]</value>
<tag>HDDS, SECURITY, MANAGEMENT</tag>
<description>
@@ -2094,7 +2094,7 @@
</description>
</property>
<property>
- <name>hdds.datanode.http.kerberos.keytab</name>
+ <name>hdds.datanode.http.auth.kerberos.keytab</name>
<value>/etc/security/keytabs/HTTP.keytab</value>
<tag>HDDS, SECURITY, MANAGEMENT</tag>
<description>
@@ -2220,16 +2220,7 @@
</description>
</property>
<property>
- <name>ozone.recon.keytab.file</name>
- <value/>
- <tag>RECON, SECURITY</tag>
- <description>
- DEPRECATED. Use "ozone.recon.http.kerberos.keytab.file" instead.
- The keytab file for HTTP Kerberos authentication in Recon.
- </description>
- </property>
- <property>
- <name>ozone.recon.http.kerberos.keytab.file</name>
+ <name>ozone.recon.http.auth.kerberos.keytab</name>
<value/>
<tag>RECON, SECURITY</tag>
<description>
@@ -2237,7 +2228,7 @@
</description>
</property>
<property>
- <name>ozone.recon.authentication.kerberos.principal</name>
+ <name>ozone.recon.http.auth.kerberos.principal</name>
<value/>
<tag>RECON</tag>
<description>The server principal used by Ozone Recon server. This is
@@ -2246,6 +2237,46 @@
</description>
</property>
<property>
+ <name>hdds.datanode.http.auth.type </name>
+ <value>simple</value>
+ <tag>DATANODE, SECURITY</tag>
+ <description> simple or kerberos. If kerberos is set, Kerberos SPNEGO
+ will be used for http authentication.
+ </description>
+ </property>
+ <property>
+ <name>ozone.freon.http.auth.type </name>
+ <value>simple</value>
+ <tag>FREON, SECURITY</tag>
+ <description> simple or kerberos. If kerberos is set, Kerberos SPNEGO
+ will be used for http authentication.
+ </description>
+ </property>
+ <property>
+ <name>ozone.om.http.auth.type </name>
+ <value>simple</value>
+ <tag>OM, SECURITY</tag>
+ <description> simple or kerberos. If kerberos is set, Kerberos SPNEGO
+ will be used for http authentication.
+ </description>
+ </property>
+ <property>
+ <name>ozone.recon.http.auth.type </name>
+ <value>simple</value>
+ <tag>RECON, SECURITY</tag>
+ <description> simple or kerberos. If kerberos is set, Kerberos SPNEGO
+ will be used for http authentication.
+ </description>
+ </property>
+ <property>
+ <name>ozone.s3g.http.auth.type </name>
+ <value>simple</value>
+ <tag>S3G, SECURITY</tag>
+ <description> simple or kerberos. If kerberos is set, Kerberos SPNEGO
+ will be used for http authentication.
+ </description>
+ </property>
+ <property>
<name>ozone.recon.container.db.cache.size.mb</name>
<value>128</value>
<tag>RECON, PERFORMANCE</tag>
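To illustrate how the renamed keys fit together, a minimal sketch of switching the OM web UI to Kerberos SPNEGO programmatically follows; the class name is hypothetical, and the same three settings can equally be placed in ozone-site.xml:

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;

    public final class OmHttpKerberosExample {
      public static void main(String[] args) {
        OzoneConfiguration conf = new OzoneConfiguration();
        // Switch the OM HTTP endpoint from the "simple" default to Kerberos SPNEGO.
        conf.set("ozone.om.http.auth.type", "kerberos");
        conf.set("ozone.om.http.auth.kerberos.principal", "HTTP/[email protected]");
        conf.set("ozone.om.http.auth.kerberos.keytab", "/etc/security/keytabs/HTTP.keytab");
        System.out.println(conf.get("ozone.om.http.auth.type")); // kerberos
      }
    }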
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java
index 74155c2..f533a26 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeHttpServer.java
@@ -83,4 +83,14 @@ public class HddsDatanodeHttpServer extends BaseHttpServer {
protected String getEnabledKey() {
return HddsConfigKeys.HDDS_DATANODE_HTTP_ENABLED_KEY;
}
+
+ @Override
+ protected String getHttpAuthType() {
+ return HddsConfigKeys.HDDS_DATANODE_HTTP_AUTH_TYPE;
+ }
+
+ @Override
+ protected String getHttpAuthConfigPrefix() {
+ return HddsConfigKeys.OZONE_DATANODE_HTTP_AUTH_CONFIG_PREFIX;
+ }
}
diff --git a/hadoop-hdds/docs/content/security/SecureOzone.md b/hadoop-hdds/docs/content/security/SecureOzone.md
index b33a603..7d3693e 100644
--- a/hadoop-hdds/docs/content/security/SecureOzone.md
+++ b/hadoop-hdds/docs/content/security/SecureOzone.md
@@ -164,11 +164,11 @@ All these settings should be made in ozone-site.xml.
</thead>
<tbody>
<tr>
- <td>ozone.s3g.authentication.kerberos.principal</th>
+ <td>ozone.s3g.http.auth.kerberos.principal</th>
<td>S3 Gateway principal. <br/> e.g. HTTP/[email protected]</td>
</tr>
<tr>
- <td>ozone.s3g.keytab.file</th>
+ <td>ozone.s3g.http.auth.kerberos.keytab</th>
<td>The keytab file used by S3 gateway</td>
</tr>
</tbody>
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
index 025a68c..9cb4992 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneSecurityUtil;
-import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
@@ -47,9 +46,14 @@ import static org.apache.hadoop.hdds.server.http.HttpConfig.getHttpPolicy;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_HTTPS_NEED_AUTH_DEFAULT;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_HTTPS_NEED_AUTH_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_HTTP_SECURITY_ENABLED_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_HTTP_SECURITY_ENABLED_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYPASSWORD_KEY;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY;
+
import org.eclipse.jetty.webapp.WebAppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -95,8 +99,29 @@ public abstract class BaseHttpServer {
conf.set("hadoop.prometheus.endpoint.enabled", "false");
HttpServer2.Builder builder = newHttpServer2BuilderForOzone(
- conf, httpAddress, httpsAddress,
- name, getSpnegoPrincipal(), getKeytabFile());
+ conf, httpAddress, httpsAddress, name);
+
+ boolean isSecurityEnabled = UserGroupInformation.isSecurityEnabled() &&
+ OzoneSecurityUtil.isHttpSecurityEnabled(conf);
+ LOG.info("Hadoop Security Enabled: {} " +
+ "Ozone Security Enabled: {} " +
+ "Ozone HTTP Security Enabled: {} ",
+ UserGroupInformation.isSecurityEnabled(),
+ conf.getBoolean(OZONE_SECURITY_ENABLED_KEY,
+ OZONE_SECURITY_ENABLED_DEFAULT),
+ conf.getBoolean(OZONE_HTTP_SECURITY_ENABLED_KEY,
+ OZONE_HTTP_SECURITY_ENABLED_DEFAULT));
+
+ if (isSecurityEnabled) {
+ String httpAuthType = conf.get(getHttpAuthType(), "simple");
+ LOG.info("HttpAuthType: {} = {}", getHttpAuthType(), httpAuthType);
+ if (httpAuthType.equals("kerberos")) {
+ builder.setSecurityEnabled(true);
+ builder.authFilterConfigurationPrefix(getHttpAuthConfigPrefix());
+ builder.setUsernameConfKey(getSpnegoPrincipal());
+ builder.setKeytabConfKey(getKeytabFile());
+ }
+ }
final boolean xFrameEnabled = conf.getBoolean(
DFSConfigKeysLegacy.DFS_XFRAME_OPTION_ENABLED,
@@ -162,26 +187,14 @@ public abstract class BaseHttpServer {
*/
public static HttpServer2.Builder newHttpServer2BuilderForOzone(
ConfigurationSource conf, final InetSocketAddress httpAddr,
- final InetSocketAddress httpsAddr, String name, String spnegoUserNameKey,
- String spnegoKeytabFileKey) throws IOException {
+ final InetSocketAddress httpsAddr, String name) throws IOException {
HttpConfig.Policy policy = getHttpPolicy(conf);
- boolean isSecurityEnabled = UserGroupInformation.isSecurityEnabled() &&
- OzoneSecurityUtil.isHttpSecurityEnabled(conf);
HttpServer2.Builder builder = new HttpServer2.Builder().setName(name)
.setConf(conf).setACL(new AccessControlList(conf.get(
- OZONE_ADMINISTRATORS, " ")))
- .setSecurityEnabled(isSecurityEnabled)
- .setUsernameConfKey(spnegoUserNameKey)
- .setKeytabConfKey(spnegoKeytabFileKey);
+ OZONE_ADMINISTRATORS, " ")));
// initialize the webserver for uploading/downloading files.
- if (isSecurityEnabled) {
- LOG.info("Starting web server as: "
- + SecurityUtil.getServerPrincipal(conf.get(spnegoUserNameKey),
- httpAddr.getHostName()));
- }
-
if (policy.isHttpEnabled()) {
if (httpAddr.getPort() == 0) {
builder.setFindPort(true);
@@ -220,6 +233,12 @@ public abstract class BaseHttpServer {
httpServer.addServlet(servletName, pathSpec, clazz);
}
+ protected void addInternalServlet(String servletName, String pathSpec,
+ Class<? extends HttpServlet> clazz) {
+ httpServer.addInternalServlet(servletName, pathSpec, clazz);
+ }
+
+
/**
* Returns the WebAppContext associated with this HttpServer.
*
@@ -332,25 +351,6 @@ public abstract class BaseHttpServer {
sslConf.get("ssl.server.exclude.cipher.list"));
}
-
- /**
- * Get SPNEGO keytab Key from configuration.
- *
- * @param conf Configuration
- * @param defaultKey default key to be used for config lookup
- * @return DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY if the key is not empty
- * else return defaultKey
- */
- public static String getSpnegoKeytabKey(ConfigurationSource conf,
- String defaultKey) {
- String value =
- conf.get(
- DFSConfigKeysLegacy.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
- return (value == null || value.isEmpty()) ?
- defaultKey :
- DFSConfigKeysLegacy.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY;
- }
-
/**
* Leverages the Configuration.getPassword method to attempt to get
* passwords from the CredentialProvider API before falling back to
@@ -437,4 +437,8 @@ public abstract class BaseHttpServer {
protected abstract String getEnabledKey();
+ protected abstract String getHttpAuthType();
+
+ protected abstract String getHttpAuthConfigPrefix();
+
}
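In short, the SPNEGO filter is now only wired up when both cluster security and the per-component HTTP auth type ask for it; a condensed, standalone sketch of that branch (not the actual class):

    import org.apache.hadoop.hdds.conf.ConfigurationSource;

    final class SpnegoDecisionSketch {
      // Mirrors the check added to BaseHttpServer: security on AND auth type "kerberos".
      static boolean wantsSpnego(ConfigurationSource conf, String authTypeKey,
          boolean securityEnabled) {
        return securityEnabled && "kerberos".equals(conf.get(authTypeKey, "simple"));
      }
    }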
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterContainer.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterContainer.java
deleted file mode 100644
index 3b2b8eb..0000000
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterContainer.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdds.server.http;
-
-import java.util.Map;
-
-/**
- * A container class for javax.servlet.Filter.
- */
-public interface FilterContainer {
- /**
- * Add a filter to the container.
- * @param name Filter name
- * @param classname Filter class name
- * @param parameters a map from parameter names to initial values
- */
- void addFilter(String name, String classname, Map<String, String> parameters);
-
- /**
- * Add a global filter to the container.
- * @param name filter name
- * @param classname filter class name
- * @param parameters a map from parameter names to initial values
- */
- void addGlobalFilter(String name, String classname,
- Map<String, String> parameters);
-}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterInitializer.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterInitializer.java
deleted file mode 100644
index 4d6b65b..0000000
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/FilterInitializer.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdds.server.http;
-
-import org.apache.hadoop.hdds.conf.ConfigurationSource;
-
-/**
- * Initialize a javax.servlet.Filter.
- */
-public abstract class FilterInitializer {
- /**
- * Initialize a Filter to a FilterContainer.
- * @param container The filter container
- * @param conf Configuration for run-time parameters
- */
- public abstract void initFilter(FilterContainer container,
- ConfigurationSource conf);
-}
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
index a57fe54..a37a08c 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
@@ -57,6 +57,9 @@ import org.apache.hadoop.hdds.annotation.InterfaceStability;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.utils.LegacyHadoopConfigurationSource;
+import org.apache.hadoop.http.FilterContainer;
+import org.apache.hadoop.http.FilterInitializer;
+import org.apache.hadoop.http.lib.StaticUserWebFilter;
import org.apache.hadoop.jmx.JMXJsonServlet;
import org.apache.hadoop.log.LogLevel;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
@@ -104,6 +107,8 @@ import org.eclipse.jetty.webapp.WebAppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.hadoop.security.AuthenticationFilterInitializer.getFilterConfigMap;
+
/**
* Create a Jetty embedded server to answer http requests. The primary goal is
* to serve up status information for the server. There are three contexts:
@@ -435,6 +440,8 @@ public final class HttpServer2 implements FilterContainer {
HttpServer2 server = new HttpServer2(this);
if (this.securityEnabled) {
+ LOG.info("Initialize spnego with host: {} userKey: {} keytabKey: {}",
+ hostName, usernameConfKey, keytabConfKey);
server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
}
@@ -564,12 +571,13 @@ public final class HttpServer2 implements FilterContainer {
this.findPort = b.findPort;
this.portRanges = b.portRanges;
- initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs);
+ initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs,
+ b.authFilterConfigurationPrefix);
}
private void initializeWebServer(String name, String hostName,
- ConfigurationSource conf, String[] pathSpecs)
- throws IOException {
+ ConfigurationSource conf, String[] pathSpecs,
+ String authFilterConfigPrefix) throws IOException {
Preconditions.checkNotNull(webAppContext);
@@ -607,7 +615,13 @@ public final class HttpServer2 implements FilterContainer {
if (initializers != null) {
conf.set(BIND_ADDRESS, hostName);
for (FilterInitializer c : initializers) {
- c.initFilter(this, conf);
+ //c.initFilter(this, conf) does not work here as it does not take config
+ // prefix key.
+ Map<String, String> filterConfig = getFilterConfigMap(
+ LegacyHadoopConfigurationSource.asHadoopConfiguration(conf),
+ authFilterConfigPrefix);
+ addFilter("authentication", AuthenticationFilter.class.getName(),
+ filterConfig);
}
}
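The key point of the HttpServer2 change is that the authentication filter's init parameters are now read from the component-specific prefix instead of the global hadoop.http.authentication.* namespace. A rough usage sketch of getFilterConfigMap (the FilterConfigSketch class is illustrative; only the prefix handling is taken from the patch):

    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import static org.apache.hadoop.security.AuthenticationFilterInitializer.getFilterConfigMap;

    final class FilterConfigSketch {
      // Keys under the prefix are stripped of it and become AuthenticationFilter
      // init parameters such as "type", "kerberos.principal" and "kerberos.keytab".
      static Map<String, String> scmFilterParams() {
        Configuration conf = new Configuration(false);
        conf.set("hdds.scm.http.auth.type", "kerberos");
        conf.set("hdds.scm.http.auth.kerberos.principal", "HTTP/[email protected]");
        return getFilterConfigMap(conf, "hdds.scm.http.auth.");
      }
    }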
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/StaticUserWebFilter.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/StaticUserWebFilter.java
deleted file mode 100644
index 1428ba3..0000000
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/StaticUserWebFilter.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdds.server.http;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import java.io.IOException;
-import java.security.Principal;
-import java.util.HashMap;
-
-import org.apache.hadoop.hdds.conf.ConfigurationSource;
-
-import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Provides a servlet filter that pretends to authenticate a fake user (Dr.Who)
- * so that the web UI is usable for a secure cluster without authentication.
- */
-public class StaticUserWebFilter extends FilterInitializer {
- static final String DEPRECATED_UGI_KEY = "dfs.web.ugi";
-
- private static final Logger LOG =
- LoggerFactory.getLogger(StaticUserWebFilter.class);
-
- static class User implements Principal {
- private final String name;
-
- User(String name) {
- this.name = name;
- }
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public int hashCode() {
- return name.hashCode();
- }
-
- @Override
- public boolean equals(Object other) {
- if (other == this) {
- return true;
- } else if (other == null || other.getClass() != getClass()) {
- return false;
- }
- return ((User) other).name.equals(name);
- }
-
- @Override
- public String toString() {
- return name;
- }
- }
-
- /**
- * JavaEE filter implementation to do the work.
- */
- public static class StaticUserFilter implements Filter {
- private User user;
- private String username;
-
- @Override
- public void destroy() {
- // NOTHING
- }
-
- @Override
- public void doFilter(ServletRequest request, ServletResponse response,
- FilterChain chain
- ) throws IOException, ServletException {
- HttpServletRequest httpRequest = (HttpServletRequest) request;
- // if the user is already authenticated, don't override it
- if (httpRequest.getRemoteUser() != null) {
- chain.doFilter(request, response);
- } else {
- HttpServletRequestWrapper wrapper =
- new HttpServletRequestWrapper(httpRequest) {
- @Override
- public Principal getUserPrincipal() {
- return user;
- }
-
- @Override
- public String getRemoteUser() {
- return username;
- }
- };
- chain.doFilter(wrapper, response);
- }
- }
-
- @Override
- public void init(FilterConfig conf) throws ServletException {
- this.username = conf.getInitParameter(HADOOP_HTTP_STATIC_USER);
- this.user = new User(username);
- }
-
- }
-
- @Override
- public void initFilter(FilterContainer container, ConfigurationSource conf) {
- HashMap<String, String> options = new HashMap<String, String>();
-
- String username = getUsernameFromConf(conf);
- options.put(HADOOP_HTTP_STATIC_USER, username);
-
- container.addFilter("static_user_filter",
- StaticUserFilter.class.getName(),
- options);
- }
-
- /**
- * Retrieve the static username from the configuration.
- */
- static String getUsernameFromConf(ConfigurationSource conf) {
- String oldStyleUgi = conf.get(DEPRECATED_UGI_KEY);
- if (oldStyleUgi != null) {
- // We can't use the normal configuration deprecation mechanism here
- // since we need to split out the username from the configured UGI.
- LOG.warn(DEPRECATED_UGI_KEY + " should not be used. Instead, use " +
- HADOOP_HTTP_STATIC_USER + ".");
- String[] parts = oldStyleUgi.split(",");
- return parts[0];
- } else {
- return conf.get(HADOOP_HTTP_STATIC_USER,
- DEFAULT_HADOOP_HTTP_STATIC_USER);
- }
- }
-
-}
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestBaseHttpServer.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestBaseHttpServer.java
index 1730df6..4f73375 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestBaseHttpServer.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/http/TestBaseHttpServer.java
@@ -81,6 +81,16 @@ public class TestBaseHttpServer {
protected String getEnabledKey() {
return "enabled";
}
+
+ @Override
+ protected String getHttpAuthType() {
+ return "simple";
+ }
+
+ @Override
+ protected String getHttpAuthConfigPrefix() {
+ return null;
+ }
};
conf.set("addresskey", "0.0.0.0:1234");
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMHTTPServerConfig.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMHTTPServerConfig.java
index 18eb8b3..2b6fa03 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMHTTPServerConfig.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMHTTPServerConfig.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hdds.conf.ConfigType;
/**
* SCM HTTP Server configuration in Java style configuration class.
*/
-@ConfigGroup(prefix = "hdds.scm.http")
+@ConfigGroup(prefix = "hdds.scm.http.auth")
public class SCMHTTPServerConfig {
@Config(key = "kerberos.principal",
@@ -75,9 +75,17 @@ public class SCMHTTPServerConfig {
* ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY)
*/
public static class ConfigStrings {
+ public static final String HDDS_SCM_HTTP_AUTH_CONFIG_PREFIX =
+ SCMHTTPServerConfig.class.getAnnotation(ConfigGroup.class).prefix() +
+ ".";
+
+ public static final String HDDS_SCM_HTTP_AUTH_TYPE =
+ HDDS_SCM_HTTP_AUTH_CONFIG_PREFIX + "type";
+
public static final String HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY =
- "hdds.scm.http.kerberos.principal";
+ HDDS_SCM_HTTP_AUTH_CONFIG_PREFIX + "kerberos.principal";
+
public static final String HDDS_SCM_HTTP_KERBEROS_KEYTAB_FILE_KEY =
- "hdds.scm.http.kerberos.keytab";
+ HDDS_SCM_HTTP_AUTH_CONFIG_PREFIX + "kerberos.keytab";
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
index 80b3eb9..b644978 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
@@ -75,4 +75,14 @@ public class StorageContainerManagerHttpServer extends BaseHttpServer {
return ScmConfigKeys.OZONE_SCM_HTTP_ENABLED_KEY;
}
+ @Override
+ protected String getHttpAuthType() {
+ return SCMHTTPServerConfig.ConfigStrings.HDDS_SCM_HTTP_AUTH_TYPE;
+ }
+
+ @Override
+ protected String getHttpAuthConfigPrefix() {
+ return SCMHTTPServerConfig.ConfigStrings.HDDS_SCM_HTTP_AUTH_CONFIG_PREFIX;
+ }
+
}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java
index 7af6627..7800d2f 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java
@@ -203,9 +203,14 @@ public final class OMConfigKeys {
public static final String OZONE_OM_KERBEROS_PRINCIPAL_KEY = "ozone.om"
+ ".kerberos.principal";
public static final String OZONE_OM_HTTP_KERBEROS_KEYTAB_FILE =
- "ozone.om.http.kerberos.keytab";
+ "ozone.om.http.auth.kerberos.keytab";
public static final String OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY
- = "ozone.om.http.kerberos.principal";
+ = "ozone.om.http.auth.kerberos.principal";
+ public static final String OZONE_OM_HTTP_AUTH_TYPE =
+ "ozone.om.http.auth.type";
+ public static final String OZONE_OM_HTTP_AUTH_CONFIG_PREFIX =
+ "ozone.om.http.auth.";
+
// Delegation token related keys
public static final String DELEGATION_REMOVER_SCAN_INTERVAL_KEY =
"ozone.manager.delegation.remover.scan.interval";
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config b/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config
index 0b08316..4497bb0 100644
--- a/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config
@@ -30,15 +30,18 @@
OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/[email protected]
OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
OZONE-SITE.XML_ozone.om.kerberos.principal=om/[email protected]
OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
-OZONE-SITE.XML_ozone.s3g.keytab.file=/etc/security/keytabs/HTTP.keytab
-OZONE-SITE.XML_ozone.s3g.authentication.kerberos.principal=HTTP/[email protected]
OZONE-SITE.XML_ozone.administrators=*
OZONE-SITE.XML_ozone.security.enabled=true
-OZONE-SITE.XML_hdds.scm.http.kerberos.principal=HTTP/[email protected]
-OZONE-SITE.XML_hdds.scm.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-OZONE-SITE.XML_ozone.om.http.kerberos.principal=HTTP/[email protected]
-OZONE-SITE.XML_ozone.om.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_hdds.scm.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_hdds.scm.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.om.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_ozone.om.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_hdds.datanode.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_hdds.datanode.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.s3g.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.s3g.http.auth.kerberos.principal=HTTP/[email protected]
+
HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/[email protected]
HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/[email protected]
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
index 9b78a45..c60ac48 100644
--- a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
@@ -29,16 +29,7 @@ OZONE-SITE.XML_ozone.handler.type=distributed
OZONE-SITE.XML_ozone.scm.client.address=scm
OZONE-SITE.XML_hdds.block.token.enabled=true
OZONE-SITE.XML_ozone.replication=3
-OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/[email protected]
-OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
-OZONE-SITE.XML_ozone.om.kerberos.principal=om/[email protected]
-OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
-OZONE-SITE.XML_ozone.s3g.keytab.file=/etc/security/keytabs/HTTP.keytab
-OZONE-SITE.XML_ozone.s3g.authentication.kerberos.principal=HTTP/[email protected]
-OZONE-SITE.XML_ozone.recon.authentication.kerberos.principal=HTTP/[email protected]
-OZONE-SITE.XML_ozone.recon.http.kerberos.keytab.file=/etc/security/keytabs/HTTP.keytab
-OZONE-SITE.XML_ozone.recon.kerberos.keytab.file=/etc/security/keytabs/recon.keytab
-OZONE-SITE.XML_ozone.recon.kerberos.principal=recon/[email protected]
+
OZONE-SITE.XML_recon.om.snapshot.task.interval.delay=1m
OZONE-SITE.XML_ozone.recon.db.dir=/data/metadata/recon
OZONE-SITE.XML_recon.om.snapshot.task.initial.delay=20s
@@ -48,14 +39,7 @@ OZONE-SITE.XML_ozone.security.enabled=true
OZONE-SITE.XML_ozone.acl.enabled=true
OZONE-SITE.XML_ozone.acl.authorizer.class=org.apache.hadoop.ozone.security.acl.OzoneNativeAuthorizer
OZONE-SITE.XML_ozone.administrators="testuser/[email protected],testuser/[email protected]"
-OZONE-SITE.XML_hdds.scm.http.kerberos.principal=HTTP/[email protected]
-OZONE-SITE.XML_hdds.scm.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-OZONE-SITE.XML_ozone.om.http.kerberos.principal=HTTP/[email protected]
-OZONE-SITE.XML_ozone.om.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/[email protected]
-HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
-HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/[email protected]
-HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+
OZONE-SITE.XML_hdds.datanode.dir=/data/hdds
HDFS-SITE.XML_dfs.datanode.address=0.0.0.0:1019
HDFS-SITE.XML_dfs.datanode.http.address=0.0.0.0:1012
@@ -64,12 +48,46 @@ CORE-SITE.XML_hadoop.security.authentication=kerberos
CORE-SITE.XML_hadoop.security.auth_to_local=RULE:[2:$1@$0](.*)s/.*/root/
CORE-SITE.XML_hadoop.security.key.provider.path=kms://http@kms:9600/kms
+
+OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/[email protected]
+OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
+OZONE-SITE.XML_ozone.om.kerberos.principal=om/[email protected]
+OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
+OZONE-SITE.XML_ozone.recon.kerberos.keytab.file=/etc/security/keytabs/recon.keytab
+OZONE-SITE.XML_ozone.recon.kerberos.principal=recon/[email protected]
+
+HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/[email protected]
+HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
+HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/[email protected]
+HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+
+
+OZONE-SITE.XML_ozone.security.http.kerberos.enabled=true
+OZONE-SITE.XML_ozone.http.filter.initializers=org.apache.hadoop.security.AuthenticationFilterInitializer
+
+OZONE-SITE.XML_ozone.om.http.auth.type=kerberos
+OZONE-SITE.XML_hdds.scm.http.auth.type=kerberos
+OZONE-SITE.XML_hdds.datanode.http.auth.type=kerberos
+OZONE-SITE.XML_ozone.s3g.http.auth.type=kerberos
+OZONE-SITE.XML_ozone.recon.http.auth.type=kerberos
+
+OZONE-SITE.XML_hdds.scm.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_hdds.scm.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.om.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_ozone.om.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_hdds.datanode.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_hdds.datanode.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.s3g.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.s3g.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_ozone.recon.http.auth.kerberos.principal=HTTP/[email protected]
+OZONE-SITE.XML_ozone.recon.http.auth.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+
CORE-SITE.XML_hadoop.http.authentication.simple.anonymous.allowed=false
CORE-SITE.XML_hadoop.http.authentication.signature.secret.file=/etc/security/http_secret
CORE-SITE.XML_hadoop.http.authentication.type=kerberos
CORE-SITE.XML_hadoop.http.authentication.kerberos.principal=HTTP/[email protected]
CORE-SITE.XML_hadoop.http.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-CORE-SITE.XML_hadoop.http.filter.initializers=org.apache.hadoop.security.AuthenticationFilterInitializer
+
CORE-SITE.XML_hadoop.security.authorization=true
HADOOP-POLICY.XML_ozone.om.security.client.protocol.acl=*
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
index 49f7f8d..437d4bc 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.ozone;
import org.apache.hadoop.conf.TestConfigurationFieldsBase;
import org.apache.hadoop.hdds.HddsConfigKeys;
+import org.apache.hadoop.hdds.scm.server.SCMHTTPServerConfig;
import org.apache.hadoop.ozone.om.OMConfigKeys;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.ozone.recon.ReconServerConfigKeys;
@@ -38,7 +39,8 @@ public class TestOzoneConfigurationFields extends TestConfigurationFieldsBase {
new Class[] {OzoneConfigKeys.class, ScmConfigKeys.class,
OMConfigKeys.class, HddsConfigKeys.class,
ReconServerConfigKeys.class,
- S3GatewayConfigKeys.class
+ S3GatewayConfigKeys.class,
+ SCMHTTPServerConfig.class
};
errorIfMissingConfigProps = true;
errorIfMissingXmlProps = true;
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
index 79f50ea..9fcd85e 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
@@ -34,9 +34,10 @@ public class OzoneManagerHttpServer extends BaseHttpServer {
public OzoneManagerHttpServer(ConfigurationSource conf, OzoneManager om)
throws IOException {
super(conf, "ozoneManager");
- addServlet("serviceList", OZONE_OM_SERVICE_LIST_HTTP_ENDPOINT,
+ // TODO: change back to addServlet when HDDS-3453 is fixed.
+ addInternalServlet("serviceList", OZONE_OM_SERVICE_LIST_HTTP_ENDPOINT,
ServiceListJSONServlet.class);
- addServlet("dbCheckpoint", OZONE_OM_DB_CHECKPOINT_HTTP_ENDPOINT,
+ addInternalServlet("dbCheckpoint", OZONE_OM_DB_CHECKPOINT_HTTP_ENDPOINT,
OMDBCheckpointServlet.class);
getWebAppContext().setAttribute(OzoneConsts.OM_CONTEXT_ATTRIBUTE, om);
}
@@ -80,4 +81,14 @@ public class OzoneManagerHttpServer extends BaseHttpServer {
@Override protected String getEnabledKey() {
return OMConfigKeys.OZONE_OM_HTTP_ENABLED_KEY;
}
+
+ @Override
+ protected String getHttpAuthType() {
+ return OMConfigKeys.OZONE_OM_HTTP_AUTH_TYPE;
+ }
+
+ @Override
+ protected String getHttpAuthConfigPrefix() {
+ return OMConfigKeys.OZONE_OM_HTTP_AUTH_CONFIG_PREFIX;
+ }
}
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ConfigurationProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ConfigurationProvider.java
index 97a4de6..6312365 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ConfigurationProvider.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ConfigurationProvider.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.ozone.recon;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configuration.DeprecationDelta;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import com.google.common.annotations.VisibleForTesting;
@@ -39,9 +40,15 @@ public class ConfigurationProvider implements
private static OzoneConfiguration configuration;
private static void addDeprecations() {
- Configuration.addDeprecation(
- ReconServerConfigKeys.OZONE_RECON_HTTP_KEYTAB_FILE_OLD,
- ReconServerConfigKeys.OZONE_RECON_HTTP_KEYTAB_FILE);
+ Configuration.addDeprecations(new DeprecationDelta[]{
+ new DeprecationDelta("ozone.recon.keytab.file",
+ ReconServerConfigKeys.OZONE_RECON_HTTP_KEYTAB_FILE),
+ new DeprecationDelta(("ozone.recon.http.kerberos.keytab.file"),
+ ReconServerConfigKeys.OZONE_RECON_HTTP_KEYTAB_FILE),
+ new DeprecationDelta("ozone.recon.authentication.kerberos.principal",
+ ReconServerConfigKeys.
+ OZONE_RECON_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL)
+ });
}
@VisibleForTesting
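The practical effect of the DeprecationDelta mappings above is that a value written under one of the old Recon keys remains visible through the new ozone.recon.http.auth.* names; a small self-contained sketch (class and method names hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configuration.DeprecationDelta;

    final class ReconDeprecationSketch {
      static String resolveKeytab() {
        Configuration.addDeprecations(new DeprecationDelta[] {
            new DeprecationDelta("ozone.recon.keytab.file",
                "ozone.recon.http.auth.kerberos.keytab")
        });
        Configuration conf = new Configuration(false);
        conf.set("ozone.recon.keytab.file", "/etc/security/keytabs/HTTP.keytab");
        // The deprecated key is transparently forwarded to the new one.
        return conf.get("ozone.recon.http.auth.kerberos.keytab");
      }
    }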
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java
index c66a487..fbf5170 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconHttpServer.java
@@ -85,4 +85,14 @@ public class ReconHttpServer extends BaseHttpServer {
protected String getEnabledKey() {
return ReconServerConfigKeys.OZONE_RECON_HTTP_ENABLED_KEY;
}
+
+ @Override
+ protected String getHttpAuthType() {
+ return ReconServerConfigKeys.OZONE_RECON_HTTP_AUTH_TYPE;
+ }
+
+ @Override
+ protected String getHttpAuthConfigPrefix() {
+ return ReconServerConfigKeys.OZONE_RECON_HTTP_AUTH_CONFIG_PREFIX;
+ }
}
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
index d2eb8e1..3605896 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
@@ -39,17 +39,14 @@ public final class ReconServerConfigKeys {
"ozone.recon.http-address";
public static final String OZONE_RECON_HTTPS_ADDRESS_KEY =
"ozone.recon.https-address";
- // Deprecated config
- public static final String OZONE_RECON_HTTP_KEYTAB_FILE_OLD =
- "ozone.recon.keytab.file";
public static final String OZONE_RECON_HTTP_KEYTAB_FILE =
- "ozone.recon.http.kerberos.keytab.file";
+ "ozone.recon.http.auth.kerberos.keytab";
public static final String OZONE_RECON_HTTP_BIND_HOST_DEFAULT =
"0.0.0.0";
public static final int OZONE_RECON_HTTP_BIND_PORT_DEFAULT = 9888;
public static final int OZONE_RECON_HTTPS_BIND_PORT_DEFAULT = 9889;
public static final String OZONE_RECON_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL =
- "ozone.recon.authentication.kerberos.principal";
+ "ozone.recon.http.auth.kerberos.principal";
public static final String OZONE_RECON_CONTAINER_DB_CACHE_SIZE_MB =
"ozone.recon.container.db.cache.size.mb";
@@ -100,6 +97,14 @@ public final class ReconServerConfigKeys {
"ozone.recon.task.thread.count";
public static final int OZONE_RECON_TASK_THREAD_COUNT_DEFAULT = 5;
+ public static final String OZONE_RECON_HTTP_AUTH_CONFIG_PREFIX =
+ "ozone.recon.http.auth.";
+
+ public static final String OZONE_RECON_HTTP_AUTH_TYPE =
+ OZONE_RECON_HTTP_AUTH_CONFIG_PREFIX + "type";
+
+
+
/**
* Private constructor for utility class.
*/
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayConfigKeys.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayConfigKeys.java
index 2611f50..fae1c82 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayConfigKeys.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayConfigKeys.java
@@ -37,15 +37,21 @@ public final class S3GatewayConfigKeys {
"ozone.s3g.http-address";
public static final String OZONE_S3G_HTTPS_ADDRESS_KEY =
"ozone.s3g.https-address";
- public static final String OZONE_S3G_KEYTAB_FILE =
- "ozone.s3g.keytab.file";
+
public static final String OZONE_S3G_HTTP_BIND_HOST_DEFAULT = "0.0.0.0";
public static final int OZONE_S3G_HTTP_BIND_PORT_DEFAULT = 9878;
public static final int OZONE_S3G_HTTPS_BIND_PORT_DEFAULT = 9879;
- public static final String OZONE_S3G_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL =
- "ozone.s3g.authentication.kerberos.principal";
+
public static final String OZONE_S3G_DOMAIN_NAME = "ozone.s3g.domain.name";
+ public static final String OZONE_S3G_HTTP_AUTH_CONFIG_PREFIX =
+ "ozone.s3g.http.auth.";
+ public static final String OZONE_S3G_HTTP_AUTH_TYPE =
+ OZONE_S3G_HTTP_AUTH_CONFIG_PREFIX + "type";
+ public static final String OZONE_S3G_KEYTAB_FILE =
+ OZONE_S3G_HTTP_AUTH_CONFIG_PREFIX + "kerberos.keytab";
+ public static final String OZONE_S3G_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL =
+ OZONE_S3G_HTTP_AUTH_CONFIG_PREFIX + "kerberos.principal";
/**
* Never constructed.
*/
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
index 9a7d737..d8c0bf0 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
@@ -87,4 +87,14 @@ public class S3GatewayHttpServer extends BaseHttpServer {
return S3GatewayConfigKeys.OZONE_S3G_HTTP_ENABLED_KEY;
}
+ @Override
+ protected String getHttpAuthType() {
+ return S3GatewayConfigKeys.OZONE_S3G_HTTP_AUTH_TYPE;
+ }
+
+ @Override
+ protected String getHttpAuthConfigPrefix() {
+ return S3GatewayConfigKeys.OZONE_S3G_HTTP_AUTH_CONFIG_PREFIX;
+ }
+
}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java
index e4e6b1f..d16a3f2 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/FreonHttpServer.java
@@ -71,4 +71,14 @@ public class FreonHttpServer extends BaseHttpServer {
@Override protected String getEnabledKey() {
return OzoneConfigKeys.OZONE_FREON_HTTP_ENABLED_KEY;
}
+
+ @Override
+ protected String getHttpAuthType() {
+ return OzoneConfigKeys.OZONE_FREON_HTTP_AUTH_TYPE;
+ }
+
+ @Override
+ protected String getHttpAuthConfigPrefix() {
+ return OzoneConfigKeys.OZONE_FREON_HTTP_AUTH_CONFIG_PREFIX;
+ }
}