This is an automated email from the ASF dual-hosted git repository.
sammichen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 64302fbe89 HDDS-8050. REST Endpoint for S3 secret manipulation (#4523)
64302fbe89 is described below
commit 64302fbe899e8dd4449c3b9032e5f571dbe8bdc8
Author: Ivan Zlenko <[email protected]>
AuthorDate: Mon Aug 28 13:32:09 2023 +0500
HDDS-8050. REST Endpoint for S3 secret manipulation (#4523)
---
.../org/apache/hadoop/ozone/audit/S3GAction.java | 4 +-
.../common/src/main/resources/ozone-default.xml | 16 +++
hadoop-hdds/docs/content/security/SecuringS3.md | 7 ++
hadoop-hdds/docs/content/security/SecuringS3.zh.md | 7 ++
.../hadoop/hdds/server/http/BaseHttpServer.java | 21 +++-
.../hadoop/hdds/server/http/HttpServer2.java | 41 ++------
.../hdds/server/http/ServletElementsFactory.java | 52 ++++++++++
.../ozone/client/protocol/ClientProtocol.java | 3 +-
.../apache/hadoop/ozone/client/rpc/RpcClient.java | 1 +
.../ozone/om/protocol/OzoneManagerProtocol.java | 2 +
...OzoneManagerProtocolClientSideTranslatorPB.java | 2 +
.../src/main/compose/ozonesecure-ha/docker-config | 3 +
.../src/main/compose/ozonesecure-mr/docker-config | 4 +
.../src/main/compose/ozonesecure/docker-config | 3 +
hadoop-ozone/dist/src/main/smoketest/README.md | 2 +-
.../src/main/smoketest/s3/secretgenerate.robot | 40 ++++++++
.../dist/src/main/smoketest/s3/secretrevoke.robot | 40 ++++++++
.../hadoop/ozone/TestOzoneConfigurationFields.java | 3 +
hadoop-ozone/s3gateway/pom.xml | 5 +
.../apache/hadoop/ozone/s3/GatewayApplication.java | 2 +-
.../hadoop/ozone/s3/S3GatewayHttpServer.java | 84 ++++++++++++++--
.../hadoop/ozone/s3/endpoint/EndpointBase.java | 2 +-
.../hadoop/ozone/s3secret/S3SecretConfigKeys.java | 43 ++++++++
.../hadoop/ozone/s3secret/S3SecretEnabled.java | 53 +++-------
.../S3SecretEnabledEndpointRequestFilter.java | 55 ++++++++++
.../ozone/s3secret/S3SecretEndpointBase.java | 99 ++++++++++++++++++
.../ozone/s3secret/S3SecretGenerateEndpoint.java | 49 +++++++++
.../S3SecretResponse.java} | 36 +++++--
.../ozone/s3secret/S3SecretRevokeEndpoint.java | 69 +++++++++++++
.../package-info.java} | 11 +-
.../hadoop/ozone/client/ClientProtocolStub.java | 6 +-
.../hadoop/ozone/client/ObjectStoreStub.java | 6 ++
.../hadoop/ozone/s3secret/TestSecretGenerate.java | 92 +++++++++++++++++
.../hadoop/ozone/s3secret/TestSecretRevoke.java | 112 +++++++++++++++++++++
34 files changed, 873 insertions(+), 102 deletions(-)
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/S3GAction.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/S3GAction.java
index 5647f38753..dcf116ea0f 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/S3GAction.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/S3GAction.java
@@ -45,7 +45,9 @@ public enum S3GAction implements AuditAction {
COMPLETE_MULTIPART_UPLOAD,
ABORT_MULTIPART_UPLOAD,
DELETE_KEY,
- CREATE_DIRECTORY;
+ CREATE_DIRECTORY,
+ GENERATE_SECRET,
+ REVOKE_SECRET;
@Override
public String getAction() {
diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml
b/hadoop-hdds/common/src/main/resources/ozone-default.xml
index 665ed24686..7736def18b 100644
--- a/hadoop-hdds/common/src/main/resources/ozone-default.xml
+++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml
@@ -1797,6 +1797,22 @@
</description>
</property>
+ <property>
+ <name>ozone.s3g.secret.http.enabled</name>
+ <value>false</value>
+ <tag>OZONE, S3GATEWAY</tag>
+ <description>The boolean which enables the Ozone S3Gateway Secret endpoint.
+ </description>
+ </property>
+ <property>
+ <name>ozone.s3g.secret.http.auth.type</name>
+ <value>kerberos</value>
+ <tag>S3GATEWAY, SECURITY, KERBEROS</tag>
+    <description> simple or kerberos. If kerberos is set, Kerberos SPNEGO
+ will be used for http authentication.
+ </description>
+ </property>
+
<property>
<name>ozone.om.save.metrics.interval</name>
<value>5m</value>
diff --git a/hadoop-hdds/docs/content/security/SecuringS3.md
b/hadoop-hdds/docs/content/security/SecuringS3.md
index 86e1fc8534..21af373435 100644
--- a/hadoop-hdds/docs/content/security/SecuringS3.md
+++ b/hadoop-hdds/docs/content/security/SecuringS3.md
@@ -41,6 +41,13 @@ The user needs to `kinit` first and once they have
authenticated via kerberos
```bash
ozone s3 getsecret
```
+
+* Or by sending request to /secret/generate S3 REST endpoint.
+
+```bash
+curl -X POST --negotiate -u : https://localhost:9879/secret/generate
+```
+
This command will talk to ozone, validate the user via Kerberos and generate
the AWS credentials. The values will be printed out on the screen. You can
set these values up in your _.aws_ file for automatic access while working
diff --git a/hadoop-hdds/docs/content/security/SecuringS3.zh.md
b/hadoop-hdds/docs/content/security/SecuringS3.zh.md
index 66bc7778be..ff27f2de56 100644
--- a/hadoop-hdds/docs/content/security/SecuringS3.zh.md
+++ b/hadoop-hdds/docs/content/security/SecuringS3.zh.md
@@ -35,6 +35,13 @@ icon: cloud
```bash
ozone s3 getsecret
```
+
+* 或者通过向 /secret/generate S3 REST 端点发送请求。
+
+```bash
+curl -X POST --negotiate -u : https://localhost:9879/secret/generate
+```
+
这条命令会与 Ozone 进行通信,对用户进行 Kerberos 认证并生成 AWS 凭据,结果会直接打印在屏幕上,你可以将其配置在 _.aws._
文件中,这样可以在操作 Ozone S3 桶时自动进行认证。
<div class="alert alert-danger" role="alert">
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
index 1e1b9bd66a..5d65634b44 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/BaseHttpServer.java
@@ -17,6 +17,7 @@
package org.apache.hadoop.hdds.server.http;
+import java.util.Map;
import javax.servlet.http.HttpServlet;
import java.io.IOException;
import java.net.InetSocketAddress;
@@ -106,8 +107,6 @@ public abstract class BaseHttpServer {
HttpServer2.Builder builder = newHttpServer2BuilderForOzone(
conf, httpAddress, httpsAddress, name);
- boolean isSecurityEnabled = UserGroupInformation.isSecurityEnabled() &&
- OzoneSecurityUtil.isHttpSecurityEnabled(conf);
LOG.info("Hadoop Security Enabled: {} " +
"Ozone Security Enabled: {} " +
"Ozone HTTP Security Enabled: {} ",
@@ -117,7 +116,7 @@ public abstract class BaseHttpServer {
conf.getBoolean(OZONE_HTTP_SECURITY_ENABLED_KEY,
OZONE_HTTP_SECURITY_ENABLED_DEFAULT));
- if (isSecurityEnabled) {
+ if (isSecurityEnabled()) {
String httpAuthType = conf.get(getHttpAuthType(), "simple");
LOG.info("HttpAuthType: {} = {}", getHttpAuthType(), httpAuthType);
// Ozone config prefix must be set to avoid AuthenticationFilter
@@ -257,6 +256,17 @@ public abstract class BaseHttpServer {
httpServer.addInternalServlet(servletName, pathSpec, clazz);
}
+ /**
+ * Add a filter to BaseHttpServer.
+ *
+ * @param filterName The name of the filter
+ * @param classname The filter class
+ * @param parameters The filter parameters
+ */
+ protected void addFilter(String filterName, String classname,
+ Map<String, String> parameters) {
+ httpServer.addFilter(filterName, classname, parameters);
+ }
/**
* Returns the WebAppContext associated with this HttpServer.
@@ -436,6 +446,11 @@ public abstract class BaseHttpServer {
return httpsAddress;
}
+ public boolean isSecurityEnabled() {
+ return UserGroupInformation.isSecurityEnabled() &&
+ OzoneSecurityUtil.isHttpSecurityEnabled(conf);
+ }
+
protected abstract String getHttpAddressKey();
protected abstract String getHttpsAddressKey();
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
index d9330bd360..24a2beac92 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
@@ -110,6 +110,8 @@ import org.eclipse.jetty.webapp.WebAppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static
org.apache.hadoop.hdds.server.http.ServletElementsFactory.createFilterHolder;
+import static
org.apache.hadoop.hdds.server.http.ServletElementsFactory.createFilterMapping;
import static
org.apache.hadoop.security.AuthenticationFilterInitializer.getFilterConfigMap;
/**
@@ -707,7 +709,7 @@ public final class HttpServer2 implements FilterContainer {
private static void addNoCacheFilter(ServletContextHandler ctxt) {
defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
- Collections.<String, String>emptyMap(), new String[] {"/*"});
+ Collections.emptyMap(), new String[] {"/*"});
}
/**
@@ -758,7 +760,6 @@ public final class HttpServer2 implements FilterContainer {
if (conf.getBoolean(
CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES,
CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) {
- @SuppressWarnings("unchecked")
Map<String, String> params = logContext.getInitParams();
params.put("org.eclipse.jetty.servlet.Default.aliases", "true");
}
@@ -777,7 +778,6 @@ public final class HttpServer2 implements FilterContainer {
staticContext.setResourceBase(appDir + "/static");
staticContext.addServlet(DefaultServlet.class, "/*");
staticContext.setDisplayName("static");
- @SuppressWarnings("unchecked")
Map<String, String> params = staticContext.getInitParams();
params.put("org.eclipse.jetty.servlet.Default.dirAllowed", "false");
params.put("org.eclipse.jetty.servlet.Default.gzip", "true");
@@ -1000,13 +1000,13 @@ public final class HttpServer2 implements
FilterContainer {
public void addFilter(String name, String classname,
Map<String, String> parameters) {
- FilterHolder filterHolder = getFilterHolder(name, classname, parameters);
+ FilterHolder filterHolder = createFilterHolder(name, classname,
parameters);
FilterMapping fmap =
- getFilterMapping(name, new String[] {"*.html", "*.jsp"});
+ createFilterMapping(name, new String[] {"*.html", "*.jsp"});
defineFilter(webAppContext, filterHolder, fmap);
LOG.info("Added filter {} (class={}) to context {}", name, classname,
webAppContext.getDisplayName());
- fmap = getFilterMapping(name, new String[] {"/*"});
+ fmap = createFilterMapping(name, new String[] {"/*"});
for (Map.Entry<ServletContextHandler, Boolean> e
: defaultContexts.entrySet()) {
if (e.getValue()) {
@@ -1022,8 +1022,8 @@ public final class HttpServer2 implements FilterContainer
{
@Override
public void addGlobalFilter(String name, String classname,
Map<String, String> parameters) {
- FilterHolder filterHolder = getFilterHolder(name, classname, parameters);
- FilterMapping fmap = getFilterMapping(name, new String[] {"/*"});
+ FilterHolder filterHolder = createFilterHolder(name, classname,
parameters);
+ FilterMapping fmap = createFilterMapping(name, new String[] {"/*"});
defineFilter(webAppContext, filterHolder, fmap);
for (ServletContextHandler ctx : defaultContexts.keySet()) {
defineFilter(ctx, filterHolder, fmap);
@@ -1036,8 +1036,8 @@ public final class HttpServer2 implements FilterContainer
{
*/
private static void defineFilter(ServletContextHandler ctx, String name,
String classname, Map<String, String> parameters, String[] urls) {
- FilterHolder filterHolder = getFilterHolder(name, classname, parameters);
- FilterMapping fmap = getFilterMapping(name, urls);
+ FilterHolder filterHolder = createFilterHolder(name, classname,
parameters);
+ FilterMapping fmap = createFilterMapping(name, urls);
defineFilter(ctx, filterHolder, fmap);
}
@@ -1050,25 +1050,6 @@ public final class HttpServer2 implements
FilterContainer {
handler.addFilter(holder, fmap);
}
- private static FilterMapping getFilterMapping(String name, String[] urls) {
- FilterMapping fmap = new FilterMapping();
- fmap.setPathSpecs(urls);
- fmap.setDispatches(FilterMapping.ALL);
- fmap.setFilterName(name);
- return fmap;
- }
-
- private static FilterHolder getFilterHolder(String name, String classname,
- Map<String, String> parameters) {
- FilterHolder holder = new FilterHolder();
- holder.setName(name);
- holder.setClassName(classname);
- if (parameters != null) {
- holder.setInitParameters(parameters);
- }
- return holder;
- }
-
/**
* Add the path spec to the filter path mapping.
* @param pathSpec The path spec
@@ -1569,7 +1550,6 @@ public final class HttpServer2 implements FilterContainer
{
/**
* Return the set of parameter names, quoting each name.
*/
- @SuppressWarnings("unchecked")
@Override
public Enumeration<String> getParameterNames() {
return new Enumeration<String>() {
@@ -1610,7 +1590,6 @@ public final class HttpServer2 implements FilterContainer
{
return result;
}
- @SuppressWarnings("unchecked")
@Override
public Map<String, String[]> getParameterMap() {
Map<String, String[]> result = new HashMap<>();
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ServletElementsFactory.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ServletElementsFactory.java
new file mode 100644
index 0000000000..0271dee803
--- /dev/null
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ServletElementsFactory.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.server.http;
+
+import java.util.Map;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.FilterMapping;
+
+/**
+ * Factory class which helps to create different types of servlet elements.
+ */
+public final class ServletElementsFactory {
+ private ServletElementsFactory() {
+ throw new UnsupportedOperationException(
+ "This is utility class and cannot be instantiated");
+ }
+
+ public static FilterMapping createFilterMapping(String mappingName,
+ String[] urls) {
+ FilterMapping filterMapping = new FilterMapping();
+ filterMapping.setPathSpecs(urls);
+ filterMapping.setDispatches(FilterMapping.ALL);
+ filterMapping.setFilterName(mappingName);
+ return filterMapping;
+ }
+
+ public static FilterHolder createFilterHolder(String filterName,
+ String classname, Map<String, String> parameters) {
+ FilterHolder holder = new FilterHolder();
+ holder.setName(filterName);
+ holder.setClassName(classname);
+ if (parameters != null) {
+ holder.setInitParameters(parameters);
+ }
+ return holder;
+ }
+}
diff --git
a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
index 16d75f1317..b04adb4ef3 100644
---
a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
+++
b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
@@ -23,6 +23,7 @@ import java.net.URI;
import java.util.List;
import java.util.Map;
+import javax.annotation.Nonnull;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicationFactor;
@@ -624,7 +625,7 @@ public interface ClientProtocol {
* @return S3SecretValue
* @throws IOException
*/
- S3SecretValue getS3Secret(String kerberosID) throws IOException;
+ @Nonnull S3SecretValue getS3Secret(String kerberosID) throws IOException;
/**
* Returns S3 Secret given kerberos user.
diff --git
a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/RpcClient.java
b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/RpcClient.java
index 15ff53c200..984c6e70b1 100644
---
a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/RpcClient.java
+++
b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/RpcClient.java
@@ -819,6 +819,7 @@ public class RpcClient implements ClientProtocol {
* @throws IOException
*/
@Override
+ @Nonnull
public S3SecretValue getS3Secret(String kerberosID) throws IOException {
Preconditions.checkArgument(StringUtils.isNotBlank(kerberosID),
"kerberosID cannot be null or empty.");
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocol/OzoneManagerProtocol.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocol/OzoneManagerProtocol.java
index ca25dd80e8..0a712c9387 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocol/OzoneManagerProtocol.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocol/OzoneManagerProtocol.java
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.List;
import java.util.UUID;
+import javax.annotation.Nonnull;
import org.apache.hadoop.fs.SafeModeAction;
import org.apache.hadoop.hdds.scm.container.common.helpers.ExcludeList;
import org.apache.hadoop.ozone.OzoneAcl;
@@ -573,6 +574,7 @@ public interface OzoneManagerProtocol
* @return S3SecretValue
* @throws IOException
*/
+ @Nonnull
default S3SecretValue getS3Secret(String kerberosID) throws IOException {
throw new UnsupportedOperationException("OzoneManager does not require " +
"this to be implemented, as write requests use a new approach.");
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocolPB/OzoneManagerProtocolClientSideTranslatorPB.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocolPB/OzoneManagerProtocolClientSideTranslatorPB.java
index 0b5c1f4405..813b22e5a3 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocolPB/OzoneManagerProtocolClientSideTranslatorPB.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/protocolPB/OzoneManagerProtocolClientSideTranslatorPB.java
@@ -25,6 +25,7 @@ import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.SafeModeAction;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
@@ -1005,6 +1006,7 @@ public final class
OzoneManagerProtocolClientSideTranslatorPB
}
@Override
+ @Nonnull
public S3SecretValue getS3Secret(String kerberosID) throws IOException {
GetS3SecretRequest request = GetS3SecretRequest.newBuilder()
.setKerberosID(kerberosID)
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/docker-config
b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/docker-config
index b57e5913e7..55c0b6064b 100644
--- a/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/docker-config
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/docker-config
@@ -68,6 +68,7 @@ OZONE-SITE.XML_ozone.security.enabled=true
OZONE-SITE.XML_ozone.acl.enabled=true
OZONE-SITE.XML_ozone.acl.authorizer.class=org.apache.hadoop.ozone.security.acl.OzoneNativeAuthorizer
OZONE-SITE.XML_ozone.administrators="testuser,recon,om"
+OZONE-SITE.XML_ozone.s3.administrators="testuser,s3g"
OZONE-SITE.XML_hdds.datanode.dir=/data/hdds
HDFS-SITE.XML_dfs.datanode.address=0.0.0.0:1019
@@ -98,12 +99,14 @@
HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.
OZONE-SITE.XML_ozone.security.http.kerberos.enabled=true
+OZONE-SITE.XML_ozone.s3g.secret.http.enabled=true
OZONE-SITE.XML_ozone.http.filter.initializers=org.apache.hadoop.security.AuthenticationFilterInitializer
OZONE-SITE.XML_ozone.om.http.auth.type=kerberos
OZONE-SITE.XML_hdds.scm.http.auth.type=kerberos
OZONE-SITE.XML_hdds.datanode.http.auth.type=kerberos
OZONE-SITE.XML_ozone.s3g.http.auth.type=kerberos
+OZONE-SITE.XML_ozone.s3g.secret.http.auth.type=kerberos
OZONE-SITE.XML_ozone.httpfs.http.auth.type=kerberos
OZONE-SITE.XML_ozone.recon.http.auth.type=kerberos
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config
b/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config
index f279e75ca4..d44ed296d8 100644
--- a/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure-mr/docker-config
@@ -36,8 +36,12 @@
OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
OZONE-SITE.XML_ozone.om.kerberos.principal=om/[email protected]
OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
OZONE-SITE.XML_ozone.administrators=*
+OZONE-SITE.XML_ozone.s3.administrators="s3g"
OZONE-SITE.XML_ozone.security.enabled=true
+OZONE-SITE.XML_ozone.security.http.kerberos.enabled=true
+OZONE-SITE.XML_ozone.s3g.secret.http.enabled=true
+
OZONE-SITE.XML_hdds.scm.http.auth.kerberos.principal=HTTP/[email protected]
OZONE-SITE.XML_hdds.scm.http.auth.kerberos.keytab=/etc/security/keytabs/scm.keytab
OZONE-SITE.XML_ozone.om.http.auth.kerberos.principal=HTTP/[email protected]
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
index dc4cae8bb8..2cef12f802 100644
--- a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
@@ -49,6 +49,7 @@ OZONE-SITE.XML_ozone.acl.enabled=true
OZONE-SITE.XML_ozone.acl.authorizer.class=org.apache.hadoop.ozone.security.acl.OzoneNativeAuthorizer
OZONE-SITE.XML_ozone.administrators="testuser,recon"
OZONE-SITE.XML_ozone.recon.administrators="testuser2"
+OZONE-SITE.XML_ozone.s3.administrators="testuser,s3g"
OZONE-SITE.XML_hdds.datanode.dir=/data/hdds
HDFS-SITE.XML_dfs.datanode.address=0.0.0.0:1019
@@ -87,12 +88,14 @@
HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.
OZONE-SITE.XML_ozone.security.http.kerberos.enabled=true
+OZONE-SITE.XML_ozone.s3g.secret.http.enabled=true
OZONE-SITE.XML_ozone.http.filter.initializers=org.apache.hadoop.security.AuthenticationFilterInitializer
OZONE-SITE.XML_ozone.om.http.auth.type=kerberos
OZONE-SITE.XML_hdds.scm.http.auth.type=kerberos
OZONE-SITE.XML_hdds.datanode.http.auth.type=kerberos
OZONE-SITE.XML_ozone.s3g.http.auth.type=kerberos
+OZONE-SITE.XML_ozone.s3g.secret.http.auth.type=kerberos
OZONE-SITE.XML_ozone.httpfs.http.auth.type=kerberos
OZONE-SITE.XML_ozone.recon.http.auth.type=kerberos
diff --git a/hadoop-ozone/dist/src/main/smoketest/README.md
b/hadoop-ozone/dist/src/main/smoketest/README.md
index e46ef8c242..296aa1b6b5 100644
--- a/hadoop-ozone/dist/src/main/smoketest/README.md
+++ b/hadoop-ozone/dist/src/main/smoketest/README.md
@@ -16,7 +16,7 @@
This directory contains a [robotframework](http://robotframework.org/) based
test suite for Ozone to make it easier to check the current state of the
package.
-You can run in in any environment after
[installing](https://github.com/robotframework/robotframework/blob/master/INSTALL.rst)
+You can run in any environment after
[installing](https://github.com/robotframework/robotframework/blob/master/INSTALL.rst)
```
cd $DIRECTORY_OF_OZONE
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/secretgenerate.robot
b/hadoop-ozone/dist/src/main/smoketest/s3/secretgenerate.robot
new file mode 100644
index 0000000000..8224d9ac02
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/secretgenerate.robot
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+*** Settings ***
+Documentation S3 Secret Generate test
+Library OperatingSystem
+Library String
+Resource ../commonlib.robot
+Resource ./commonawslib.robot
+Test Timeout 5 minutes
+Suite Setup Setup s3 tests
+Default Tags no-bucket-type
+
+*** Variables ***
+${ENDPOINT_URL} http://s3g:9878
+
+*** Test Cases ***
+
+S3 Gateway Generate Secret
+ Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit HTTP user
+ ${result} = Execute curl -X POST
--negotiate -u : -v ${ENDPOINT_URL}/secret/generate
+ IF '${SECURITY_ENABLED}' == 'true'
+ Should contain ${result} HTTP/1.1
200 OK ignore_case=True
+ Should Match Regexp ${result}
<awsAccessKey>.*</awsAccessKey><awsSecret>.*</awsSecret>
+ ELSE
+ Should contain ${result} S3 Secret
endpoint is disabled.
+ END
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/secretrevoke.robot
b/hadoop-ozone/dist/src/main/smoketest/s3/secretrevoke.robot
new file mode 100644
index 0000000000..4cc21b3b8b
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/secretrevoke.robot
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+*** Settings ***
+Documentation S3 Secret Revoke test
+Library OperatingSystem
+Library String
+Resource ../commonlib.robot
+Resource ./commonawslib.robot
+Test Timeout 5 minutes
+Suite Setup Setup s3 tests
+Default Tags no-bucket-type
+
+*** Variables ***
+${ENDPOINT_URL} http://s3g:9878
+${SECURITY_ENABLED} true
+
+*** Test Cases ***
+
+S3 Gateway Revoke Secret
+ Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit HTTP user
+ ${result} = Execute curl -X POST
--negotiate -u : -v ${ENDPOINT_URL}/secret/revoke
+ IF '${SECURITY_ENABLED}' == 'true'
+ Should contain ${result} HTTP/1.1 200
OK ignore_case=True
+ ELSE
+ Should contain ${result} S3 Secret
endpoint is disabled.
+ END
\ No newline at end of file
diff --git
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
index 6c3d2629e2..b91f9824a4 100644
---
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
+++
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.ozone.recon.ReconServerConfigKeys;
import org.apache.hadoop.ozone.s3.S3GatewayConfigKeys;
import java.util.Arrays;
+
+import org.apache.hadoop.ozone.s3secret.S3SecretConfigKeys;
import org.junit.Rule;
import org.junit.rules.Timeout;
@@ -51,6 +53,7 @@ public class TestOzoneConfigurationFields extends
TestConfigurationFieldsBase {
OMConfigKeys.class, HddsConfigKeys.class,
ReconConfigKeys.class, ReconServerConfigKeys.class,
S3GatewayConfigKeys.class,
+ S3SecretConfigKeys.class,
SCMHTTPServerConfig.class,
SCMHTTPServerConfig.ConfigStrings.class,
ScmConfig.ConfigStrings.class
diff --git a/hadoop-ozone/s3gateway/pom.xml b/hadoop-ozone/s3gateway/pom.xml
index 430cb03f63..cfa534b1b4 100644
--- a/hadoop-ozone/s3gateway/pom.xml
+++ b/hadoop-ozone/s3gateway/pom.xml
@@ -168,6 +168,11 @@
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-junit-jupiter</artifactId>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
index c5a291b445..778b375a66 100644
---
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
@@ -24,6 +24,6 @@ import org.glassfish.jersey.server.ResourceConfig;
*/
public class GatewayApplication extends ResourceConfig {
public GatewayApplication() {
- packages("org.apache.hadoop.ozone.s3");
+ packages("org.apache.hadoop.ozone.s3", "org.apache.hadoop.ozone.s3secret");
}
}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
index 9ff94453c0..97117a30bb 100644
---
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/S3GatewayHttpServer.java
@@ -17,30 +17,94 @@
*/
package org.apache.hadoop.ozone.s3;
+import com.google.common.base.Strings;
import java.io.IOException;
-
-import org.apache.hadoop.hdds.conf.MutableConfigurationSource;
-import org.apache.hadoop.hdds.server.http.BaseHttpServer;
-
+import java.util.HashMap;
+import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.hdds.conf.MutableConfigurationSource;
+import org.apache.hadoop.hdds.server.http.BaseHttpServer;
+import org.apache.hadoop.hdds.server.http.ServletElementsFactory;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.FilterMapping;
+import org.eclipse.jetty.servlet.ServletHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static
org.apache.hadoop.ozone.s3.S3GatewayConfigKeys.OZONE_S3G_HTTP_BIND_HOST_KEY;
+import static
org.apache.hadoop.ozone.s3.S3GatewayConfigKeys.OZONE_S3G_KEYTAB_FILE;
+import static
org.apache.hadoop.ozone.s3.S3GatewayConfigKeys.OZONE_S3G_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL;
+import static
org.apache.hadoop.ozone.s3secret.S3SecretConfigKeys.OZONE_S3G_SECRET_HTTP_AUTH_TYPE_KEY;
+import static
org.apache.hadoop.ozone.s3secret.S3SecretConfigKeys.OZONE_S3G_SECRET_HTTP_AUTH_TYPE_DEFAULT;
+import static
org.apache.hadoop.ozone.s3secret.S3SecretConfigKeys.OZONE_S3G_SECRET_HTTP_ENABLED_KEY;
+import static
org.apache.hadoop.ozone.s3secret.S3SecretConfigKeys.OZONE_S3G_SECRET_HTTP_ENABLED_KEY_DEFAULT;
/**
- * S3 Gateway specific configuration keys.
+ * Http server to provide S3-compatible API.
*/
public class S3GatewayHttpServer extends BaseHttpServer {
+ private static final Logger LOG =
+ LoggerFactory.getLogger(S3GatewayHttpServer.class);
+
/**
* Default offset between two filters.
*/
public static final int FILTER_PRIORITY_DO_AFTER = 50;
- public S3GatewayHttpServer(MutableConfigurationSource conf,
- String name) throws IOException {
+ public S3GatewayHttpServer(MutableConfigurationSource conf, String name)
+ throws IOException {
super(conf, name);
addServlet("icon", "/favicon.ico", IconServlet.class);
+ addSecretAuthentication(conf);
+ }
+
  /**
   * Registers a Kerberos (SPNEGO) {@link AuthenticationFilter} on the
   * {@code /secret/*} paths when the S3 secret HTTP endpoint is enabled
   * via {@code ozone.s3g.secret.http.enabled}.
   *
   * <p>The endpoint is only allowed to run Kerberos-authenticated: if
   * security is disabled or the configured auth type is not "kerberos",
   * server startup fails with an {@link IllegalStateException} rather than
   * exposing the secret endpoint unauthenticated.
   *
   * @param conf configuration to read the secret-endpoint settings from
   * @throws IOException declared for resolving the server principal
   *     (SecurityUtil.getServerPrincipal) — TODO confirm, nothing else here
   *     performs I/O
   */
  private void addSecretAuthentication(MutableConfigurationSource conf)
      throws IOException {

    if (conf.getBoolean(OZONE_S3G_SECRET_HTTP_ENABLED_KEY,
        OZONE_S3G_SECRET_HTTP_ENABLED_KEY_DEFAULT)) {
      String authType = conf.get(OZONE_S3G_SECRET_HTTP_AUTH_TYPE_KEY,
          OZONE_S3G_SECRET_HTTP_AUTH_TYPE_DEFAULT);

      if (UserGroupInformation.isSecurityEnabled()
          && authType.equals("kerberos")) {
        ServletHandler handler = getWebAppContext().getServletHandler();
        Map<String, String> params = new HashMap<>();

        String principalInConf =
            conf.get(OZONE_S3G_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL);
        if (!Strings.isNullOrEmpty(principalInConf)) {
          // Resolve the principal against the HTTP bind host (expands
          // _HOST-style placeholders in the configured principal).
          params.put("kerberos.principal", SecurityUtil.getServerPrincipal(
              principalInConf, conf.get(OZONE_S3G_HTTP_BIND_HOST_KEY)));
        }
        String httpKeytab = conf.get(OZONE_S3G_KEYTAB_FILE);
        if (!Strings.isNullOrEmpty(httpKeytab)) {
          params.put("kerberos.keytab", httpKeytab);
        }
        params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");

        // Wire the filter onto /secret/* only; the rest of the S3 API keeps
        // its regular AWS-signature-based authentication.
        FilterHolder holder = ServletElementsFactory.createFilterHolder(
            "secretAuthentication", AuthenticationFilter.class.getName(),
            params);
        FilterMapping filterMapping =
            ServletElementsFactory.createFilterMapping(
                "secretAuthentication",
                new String[]{"/secret/*"});

        handler.addFilter(holder, filterMapping);
      } else {
        LOG.error("Secret Endpoint should be secured with Kerberos");
        throw new IllegalStateException("Secret Endpoint should be secured"
            + " with Kerberos");
      }
    }
  }
@Override
@@ -50,7 +114,7 @@ public class S3GatewayHttpServer extends BaseHttpServer {
@Override
protected String getHttpBindHostKey() {
- return S3GatewayConfigKeys.OZONE_S3G_HTTP_BIND_HOST_KEY;
+ return OZONE_S3G_HTTP_BIND_HOST_KEY;
}
@Override
@@ -80,12 +144,12 @@ public class S3GatewayHttpServer extends BaseHttpServer {
@Override
protected String getKeytabFile() {
- return S3GatewayConfigKeys.OZONE_S3G_KEYTAB_FILE;
+ return OZONE_S3G_KEYTAB_FILE;
}
@Override
protected String getSpnegoPrincipal() {
- return S3GatewayConfigKeys.OZONE_S3G_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL;
+ return OZONE_S3G_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL;
}
@Override
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java
index 836d026975..b04bfaa4de 100644
---
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java
@@ -80,7 +80,7 @@ public abstract class EndpointBase implements Auditor {
private ContainerRequestContext context;
private Set<String> excludeMetadataFields =
- new HashSet<>(Arrays.asList(OzoneConsts.GDPR_FLAG));
+ new HashSet<>(Arrays.asList(OzoneConsts.GDPR_FLAG));
private static final Logger LOG =
LoggerFactory.getLogger(EndpointBase.class);
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretConfigKeys.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretConfigKeys.java
new file mode 100644
index 0000000000..21f71c0746
--- /dev/null
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretConfigKeys.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.hadoop.ozone.s3secret;

/**
 * This class contains constants for configuration keys used
 * in S3 secret endpoint.
 */
public final class S3SecretConfigKeys {
  /** Toggles the S3 secret REST endpoint ({@code /secret/*}). */
  public static final String OZONE_S3G_SECRET_HTTP_ENABLED_KEY =
      "ozone.s3g.secret.http.enabled";
  /** The secret endpoint is disabled unless explicitly enabled. */
  public static final boolean OZONE_S3G_SECRET_HTTP_ENABLED_KEY_DEFAULT =
      false;
  /** Common prefix for the secret endpoint's HTTP auth settings. */
  public static final String OZONE_S3G_SECRET_HTTP_AUTH_CONFIG_PREFIX =
      "ozone.s3g.secret.http.auth.";
  /** Authentication type for the secret endpoint. */
  public static final String OZONE_S3G_SECRET_HTTP_AUTH_TYPE_KEY =
      OZONE_S3G_SECRET_HTTP_AUTH_CONFIG_PREFIX + "type";
  /** Only "kerberos" is accepted by S3GatewayHttpServer at startup. */
  public static final String OZONE_S3G_SECRET_HTTP_AUTH_TYPE_DEFAULT =
      "kerberos";

  /**
   * Never constructed.
   */
  private S3SecretConfigKeys() {

  }
}
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/S3GAction.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEnabled.java
similarity index 55%
copy from
hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/S3GAction.java
copy to
hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEnabled.java
index 5647f38753..36d4fd748b 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/audit/S3GAction.java
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEnabled.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
@@ -9,47 +9,26 @@
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
under
* the License.
+ *
*/
-package org.apache.hadoop.ozone.audit;
-
-/**
- * Enum to define Audit Action types for S3Gateway.
- */
-public enum S3GAction implements AuditAction {
- //BucketEndpoint
- GET_BUCKET,
- CREATE_BUCKET,
- HEAD_BUCKET,
- DELETE_BUCKET,
- GET_ACL,
- PUT_ACL,
- LIST_MULTIPART_UPLOAD,
- MULTI_DELETE,
+package org.apache.hadoop.ozone.s3secret;
- //RootEndpoint
- LIST_S3_BUCKETS,
-
- //ObjectEndpoint
- CREATE_MULTIPART_KEY,
- COPY_OBJECT,
- CREATE_KEY,
- LIST_PARTS,
- GET_KEY,
- HEAD_KEY,
- INIT_MULTIPART_UPLOAD,
- COMPLETE_MULTIPART_UPLOAD,
- ABORT_MULTIPART_UPLOAD,
- DELETE_KEY,
- CREATE_DIRECTORY;
-
- @Override
- public String getAction() {
- return this.toString();
- }
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.ws.rs.NameBinding;

/**
 * JAX-RS name-binding annotation marking S3 secret endpoints whose
 * availability is controlled by configuration. Annotated resources are
 * bound to {@code S3SecretEnabledEndpointRequestFilter}, which rejects
 * requests when {@code ozone.s3g.secret.http.enabled} is false.
 */
@NameBinding
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface S3SecretEnabled {
}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEnabledEndpointRequestFilter.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEnabledEndpointRequestFilter.java
new file mode 100644
index 0000000000..d75e3829da
--- /dev/null
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEnabledEndpointRequestFilter.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.ozone.s3secret;
+
+import java.io.IOException;
+import javax.inject.Inject;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+
+import static
org.apache.hadoop.ozone.s3secret.S3SecretConfigKeys.OZONE_S3G_SECRET_HTTP_ENABLED_KEY;
+
+/**
+ * Filter that disables all endpoints annotated with {@link S3SecretEnabled}.
+ * Condition is based on the value of the configuration key
+ * ozone.s3g.secret.http.enabled.
+ */
+@S3SecretEnabled
+@Provider
+public class S3SecretEnabledEndpointRequestFilter
+ implements ContainerRequestFilter {
+
+ @Inject
+ private OzoneConfiguration ozoneConfiguration;
+
+ @Override
+ public void filter(ContainerRequestContext requestContext)
+ throws IOException {
+ boolean isSecretEnabled = ozoneConfiguration.getBoolean(
+ OZONE_S3G_SECRET_HTTP_ENABLED_KEY, false);
+ if (!isSecretEnabled) {
+ requestContext.abortWith(Response.status(Response.Status.BAD_REQUEST)
+ .entity("S3 Secret endpoint is disabled.")
+ .build());
+ }
+ }
+}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEndpointBase.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEndpointBase.java
new file mode 100644
index 0000000000..dc1ad0eb0c
--- /dev/null
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretEndpointBase.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.s3secret;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.ozone.audit.AuditAction;
+import org.apache.hadoop.ozone.audit.AuditEventStatus;
+import org.apache.hadoop.ozone.audit.AuditLogger;
+import org.apache.hadoop.ozone.audit.AuditLoggerType;
+import org.apache.hadoop.ozone.audit.AuditMessage;
+import org.apache.hadoop.ozone.audit.Auditor;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.s3.util.AuditUtils;
+
+import javax.inject.Inject;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.core.Context;
+import java.util.Map;
+
+/**
+ * Base implementation of endpoint for working with S3 secret.
+ */
+public class S3SecretEndpointBase implements Auditor {
+
+ @Context
+ private ContainerRequestContext context;
+
+ @Inject
+ private OzoneClient client;
+
+ protected static final AuditLogger AUDIT =
+ new AuditLogger(AuditLoggerType.S3GLOGGER);
+
+ protected String userNameFromRequest() {
+ return context.getSecurityContext().getUserPrincipal().getName();
+ }
+
+ private AuditMessage.Builder auditMessageBaseBuilder(AuditAction op,
+ Map<String, String> auditMap) {
+ AuditMessage.Builder builder = new AuditMessage.Builder()
+ .forOperation(op)
+ .withParams(auditMap);
+ if (context != null) {
+ builder.atIp(AuditUtils.getClientIpAddress(context));
+ }
+ return builder;
+ }
+
+ @Override
+ public AuditMessage buildAuditMessageForSuccess(AuditAction op,
+ Map<String, String> auditMap) {
+ AuditMessage.Builder builder = auditMessageBaseBuilder(op, auditMap)
+ .withResult(AuditEventStatus.SUCCESS);
+ return builder.build();
+ }
+
+ @Override
+ public AuditMessage buildAuditMessageForFailure(AuditAction op,
+ Map<String, String> auditMap, Throwable throwable) {
+ AuditMessage.Builder builder = auditMessageBaseBuilder(op, auditMap)
+ .withResult(AuditEventStatus.FAILURE)
+ .withException(throwable);
+ return builder.build();
+ }
+
+ public OzoneClient getClient() {
+ return client;
+ }
+
+ @VisibleForTesting
+ public void setClient(OzoneClient ozoneClient) {
+ this.client = ozoneClient;
+ }
+
+ @VisibleForTesting
+ public void setContext(ContainerRequestContext context) {
+ this.context = context;
+ }
+
+ protected Map<String, String> getAuditParameters() {
+ return AuditUtils.getAuditParameters(context);
+ }
+}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretGenerateEndpoint.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretGenerateEndpoint.java
new file mode 100644
index 0000000000..4fe9fd47fb
--- /dev/null
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretGenerateEndpoint.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.ozone.s3secret;
+
+import org.apache.hadoop.ozone.audit.S3GAction;
+import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
+
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+
+/**
+ * Endpoint to generate and return S3 secret.
+ */
+@Path("/secret/generate")
+@S3SecretEnabled
+public class S3SecretGenerateEndpoint extends S3SecretEndpointBase {
+ @POST
+ public Response generate() throws IOException {
+ S3SecretResponse s3SecretResponse = new S3SecretResponse();
+ S3SecretValue s3SecretValue = generateS3Secret();
+ s3SecretResponse.setAwsSecret(s3SecretValue.getAwsSecret());
+ s3SecretResponse.setAwsAccessKey(s3SecretValue.getAwsAccessKey());
+ AUDIT.logReadSuccess(buildAuditMessageForSuccess(
+ S3GAction.GENERATE_SECRET, getAuditParameters()));
+ return Response.ok(s3SecretResponse).build();
+ }
+
+ private S3SecretValue generateS3Secret() throws IOException {
+ return getClient().getObjectStore().getS3Secret(userNameFromRequest());
+ }
+}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretResponse.java
similarity index 50%
copy from
hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
copy to
hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretResponse.java
index c5a291b445..963f1877a5 100644
---
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretResponse.java
@@ -15,15 +15,39 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.ozone.s3;
-import org.glassfish.jersey.server.ResourceConfig;
+package org.apache.hadoop.ozone.s3secret;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
/**
- * JaxRS resource definition.
+ * Response with S3 secrets.
*/
-public class GatewayApplication extends ResourceConfig {
- public GatewayApplication() {
- packages("org.apache.hadoop.ozone.s3");
@XmlAccessorType(XmlAccessType.FIELD)
@XmlRootElement(name = "S3Secret")
public class S3SecretResponse {
  // Access key of the secret pair; populated from
  // S3SecretValue.getAwsAccessKey() by the generate endpoint.
  @XmlElement(name = "awsAccessKey")
  private String awsAccessKey;

  // The secret itself; populated from S3SecretValue.getAwsSecret().
  @XmlElement(name = "awsSecret")
  private String awsSecret;

  public String getAwsAccessKey() {
    return awsAccessKey;
  }

  public String getAwsSecret() {
    return awsSecret;
  }

  public void setAwsAccessKey(String awsAccessKey) {
    this.awsAccessKey = awsAccessKey;
  }

  public void setAwsSecret(String awsSecret) {
    this.awsSecret = awsSecret;
  }
}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretRevokeEndpoint.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretRevokeEndpoint.java
new file mode 100644
index 0000000000..423790ba92
--- /dev/null
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/S3SecretRevokeEndpoint.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.ozone.s3secret;
+
+import org.apache.hadoop.ozone.audit.S3GAction;
+import org.apache.hadoop.ozone.om.exceptions.OMException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+
+import static javax.ws.rs.core.Response.Status.NOT_FOUND;
+
+/**
+ * Revoke secret endpoint.
+ */
+@Path("/secret/revoke")
+@S3SecretEnabled
+public class S3SecretRevokeEndpoint extends S3SecretEndpointBase {
+
+ private static final Logger LOG =
+ LoggerFactory.getLogger(S3SecretRevokeEndpoint.class);
+
+
+ @POST
+ public Response revoke() throws IOException {
+ try {
+ revokeSecret();
+ AUDIT.logWriteSuccess(buildAuditMessageForSuccess(
+ S3GAction.REVOKE_SECRET, getAuditParameters()));
+ return Response.ok().build();
+ } catch (OMException e) {
+ AUDIT.logWriteFailure(buildAuditMessageForFailure(
+ S3GAction.REVOKE_SECRET, getAuditParameters(), e));
+ if (e.getResult() == OMException.ResultCodes.S3_SECRET_NOT_FOUND) {
+ return Response.status(NOT_FOUND.getStatusCode(),
+ OMException.ResultCodes.S3_SECRET_NOT_FOUND.toString())
+ .build();
+ } else {
+ LOG.error("Can't execute revoke secret request: ", e);
+ return Response.serverError().build();
+ }
+ }
+ }
+
+ private void revokeSecret() throws IOException {
+ getClient().getObjectStore().revokeS3Secret(userNameFromRequest());
+ }
+
+}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/package-info.java
similarity index 76%
copy from
hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
copy to
hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/package-info.java
index c5a291b445..49bf34e068 100644
---
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/GatewayApplication.java
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3secret/package-info.java
@@ -15,15 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.ozone.s3;
-
-import org.glassfish.jersey.server.ResourceConfig;
/**
- * JaxRS resource definition.
+ * This package contains the top level generic classes of s3 secret gateway.
*/
-public class GatewayApplication extends ResourceConfig {
- public GatewayApplication() {
- packages("org.apache.hadoop.ozone.s3");
- }
-}
+package org.apache.hadoop.ozone.s3secret;
diff --git
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ClientProtocolStub.java
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ClientProtocolStub.java
index f5fc4f17fe..4eeb2ecaa1 100644
---
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ClientProtocolStub.java
+++
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ClientProtocolStub.java
@@ -19,6 +19,7 @@
*/
package org.apache.hadoop.ozone.client;
+import javax.annotation.Nonnull;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicationFactor;
@@ -61,6 +62,8 @@ import java.util.Map;
* ClientProtocol implementation with in-memory state.
*/
public class ClientProtocolStub implements ClientProtocol {
+ private static final String STUB_KERBEROS_ID = "stub_kerberos_id";
+ private static final String STUB_SECRET = "stub_secret";
private final ObjectStoreStub objectStoreStub;
public ClientProtocolStub(ObjectStoreStub objectStoreStub) {
@@ -366,8 +369,9 @@ public class ClientProtocolStub implements ClientProtocol {
}
@Override
+ @Nonnull
public S3SecretValue getS3Secret(String kerberosID) throws IOException {
- return null;
+ return new S3SecretValue(STUB_KERBEROS_ID, STUB_SECRET);
}
@Override
diff --git
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
index a4ab45740f..b79e49f834 100644
---
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
+++
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
@@ -26,8 +26,10 @@ import java.util.Iterator;
import java.util.Map;
import java.util.stream.Collectors;
+import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.util.Time;
@@ -45,6 +47,10 @@ public class ObjectStoreStub extends ObjectStore {
super();
}
+ public ObjectStoreStub(ConfigurationSource conf, ClientProtocol proxy) {
+ super(conf, proxy);
+ }
+
private Map<String, OzoneVolumeStub> volumes = new HashMap<>();
private Map<String, Boolean> bucketEmptyStatus = new HashMap<>();
private static OzoneConfiguration conf = new OzoneConfiguration();
diff --git
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3secret/TestSecretGenerate.java
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3secret/TestSecretGenerate.java
new file mode 100644
index 0000000000..6a8b63e83a
--- /dev/null
+++
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3secret/TestSecretGenerate.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
under
+ * the License.
+ *
+ */
+
package org.apache.hadoop.ozone.s3secret;

import java.io.IOException;
import java.security.Principal;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.client.ObjectStoreStub;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneClientStub;
import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;

/**
 * Test for S3 secret generate endpoint.
 */
@ExtendWith(MockitoExtension.class)
public class TestSecretGenerate {
  private static final String USER_NAME = "test";
  private static final String USER_SECRET = "test_secret";

  private S3SecretGenerateEndpoint endpoint;

  // Mocked OM-side protocol backing the stub client.
  @Mock
  private ClientProtocol proxy;
  // Mocked JAX-RS request context and its collaborators, used to supply
  // the authenticated principal and (empty) audit parameters.
  @Mock
  private ContainerRequestContext context;
  @Mock
  private UriInfo uriInfo;
  @Mock
  private SecurityContext securityContext;
  @Mock
  private Principal principal;

  @BeforeEach
  void setUp() throws IOException {
    // The proxy returns a fixed secret for USER_NAME.
    S3SecretValue value = new S3SecretValue(USER_NAME, USER_SECRET);
    when(proxy.getS3Secret(eq(USER_NAME))).thenReturn(value);
    OzoneConfiguration conf = new OzoneConfiguration();
    OzoneClient client = new OzoneClientStub(new ObjectStoreStub(conf, proxy));

    // The endpoint resolves the user name from the request's principal.
    when(principal.getName()).thenReturn(USER_NAME);
    when(securityContext.getUserPrincipal()).thenReturn(principal);
    when(context.getSecurityContext()).thenReturn(securityContext);

    // Empty path/query parameters so audit-parameter collection succeeds.
    when(uriInfo.getPathParameters()).thenReturn(new MultivaluedHashMap<>());
    when(uriInfo.getQueryParameters()).thenReturn(new MultivaluedHashMap<>());
    when(context.getUriInfo()).thenReturn(uriInfo);

    endpoint = new S3SecretGenerateEndpoint();
    endpoint.setClient(client);
    endpoint.setContext(context);
  }

  // The response entity must carry the secret and access key produced by
  // the (stubbed) OM for the authenticated user.
  @Test
  void testSecretGenerate() throws IOException {
    S3SecretResponse response =
        (S3SecretResponse) endpoint.generate().getEntity();
    assertEquals(USER_SECRET, response.getAwsSecret());
    assertEquals(USER_NAME, response.getAwsAccessKey());
  }
}
diff --git
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3secret/TestSecretRevoke.java
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3secret/TestSecretRevoke.java
new file mode 100644
index 0000000000..8a1f81d132
--- /dev/null
+++
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3secret/TestSecretRevoke.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.ozone.s3secret;
+
+import java.io.IOException;
+import java.security.Principal;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.SecurityContext;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.hadoop.ozone.client.ObjectStoreStub;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.client.OzoneClientStub;
+import org.apache.hadoop.ozone.om.exceptions.OMException;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static javax.ws.rs.core.Response.Status.NOT_FOUND;
+import static javax.ws.rs.core.Response.Status.OK;
+import static org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes.ACCESS_DENIED;
+import static org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes.S3_SECRET_NOT_FOUND;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * Test for S3 secret revoke endpoint.
+ */
+@ExtendWith(MockitoExtension.class)
+public class TestSecretRevoke {
+  private static final String USER_NAME = "test";
+
+  private S3SecretRevokeEndpoint endpoint;
+
+  @Mock
+  private ObjectStoreStub objectStore;
+  @Mock
+  private ContainerRequestContext context;
+  @Mock
+  private UriInfo uriInfo;
+  @Mock
+  private SecurityContext securityContext;
+  @Mock
+  private Principal principal;
+
+  @BeforeEach
+  void setUp() {
+    // Authenticated caller: the request context resolves to USER_NAME.
+    when(principal.getName()).thenReturn(USER_NAME);
+    when(securityContext.getUserPrincipal()).thenReturn(principal);
+    when(context.getSecurityContext()).thenReturn(securityContext);
+
+    // Empty path/query parameters so request introspection finds nothing.
+    when(uriInfo.getPathParameters()).thenReturn(new MultivaluedHashMap<>());
+    when(uriInfo.getQueryParameters()).thenReturn(new MultivaluedHashMap<>());
+    when(context.getUriInfo()).thenReturn(uriInfo);
+
+    endpoint = new S3SecretRevokeEndpoint();
+    endpoint.setClient(new OzoneClientStub(objectStore));
+    endpoint.setContext(context);
+  }
+
+  @Test
+  void testSecretRevoke() throws IOException {
+    endpoint.revoke();
+    // The revoke call must be delegated to the object store exactly once.
+    verify(objectStore, times(1)).revokeS3Secret(eq(USER_NAME));
+  }
+
+  @Test
+  void testSecretSequentialRevokes() throws IOException {
+    // First revoke succeeds while a secret still exists.
+    Response firstResponse = endpoint.revoke();
+    assertEquals(OK.getStatusCode(), firstResponse.getStatus());
+
+    // After the secret is gone OM reports S3_SECRET_NOT_FOUND,
+    // which the endpoint maps to HTTP 404.
+    doThrow(new OMException(S3_SECRET_NOT_FOUND))
+        .when(objectStore).revokeS3Secret(any());
+    Response secondResponse = endpoint.revoke();
+    assertEquals(NOT_FOUND.getStatusCode(), secondResponse.getStatus());
+  }
+
+  @Test
+  void testSecretRevokesHandlesException() throws IOException {
+    // Any other OM failure (here ACCESS_DENIED) surfaces as HTTP 500.
+    doThrow(new OMException(ACCESS_DENIED))
+        .when(objectStore).revokeS3Secret(any());
+    Response response = endpoint.revoke();
+    assertEquals(INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus());
+  }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]