This is an automated email from the ASF dual-hosted git repository.
benyoka pushed a commit to branch branch-feature-AMBARI-14714
in repository https://gitbox.apache.org/repos/asf/ambari.git
The following commit(s) were added to refs/heads/branch-feature-AMBARI-14714 by
this push:
new 0e70cf2 [AMBARI-23130] Retrieve cluster template as artifact with
passwords filtered out. (#931)
0e70cf2 is described below
commit 0e70cf2ab030e5eaebad9fb0d0f184c988e1bed3
Author: benyoka <[email protected]>
AuthorDate: Wed Apr 11 13:44:07 2018 +0200
[AMBARI-23130] Retrieve cluster template as artifact with passwords
filtered out. (#931)
* AMBARI-23130 persist raw cluster provision request and extract stack ids
on server restart (benyoka)
* AMBARI-23130 add column to other DDLs + fix DDLs (benyoka)
* AMBARI-23130 fix review findings (benyoka)
* AMBARI-23130 persist only mpack instances instead of the full request
* AMBARI-23130 address review findings (benyoka)
* AMBARI-23130 topology request mpack information normalized (benyoka)
* AMBARI-23130 fix broken unit test (benyoka)
* AMBARI-23130 fix import and review comments
* AMBARI-23130 persist cluster template as artifact WIP (benyoka)
* AMBARI-23130 cluster template passwords replaced - almost complete
(benyoka)
* AMBARI-23130 save/retrieve cluster template as artifact review candidate
(benyoka)
* AMBARI-23130 fix review comments (benyoka)
* AMBARI-23130 fix review comments #2 (benyoka)
* AMBARI-23130 fix import (benyoka)
---
.../internal/ArtifactResourceProvider.java | 11 +-
.../internal/ClusterResourceProvider.java | 2 +-
.../ClusterTemplateArtifactPasswordReplacer.java | 160 +++++++++++++++++++
.../apache/ambari/server/state/ServiceInfo.java | 2 +-
.../ambari/server/topology/Configurable.java | 42 ++++-
.../ambari/server/topology/TopologyManager.java | 44 ++++--
.../ambari/server/utils/SecretReference.java | 142 ++++++++++++++++-
.../internal/ArtifactResourceProviderTest.java | 96 +++++++++++-
.../internal/ClusterResourceProviderTest.java | 8 +-
...lusterTemplateArtifactPasswordReplacerTest.java | 170 +++++++++++++++++++++
.../topology/ClusterDeployWithStartOnlyTest.java | 2 +-
...terInstallWithoutStartOnComponentLevelTest.java | 2 +-
.../topology/ClusterInstallWithoutStartTest.java | 2 +-
.../server/topology/TopologyManagerTest.java | 16 +-
.../ambari/server/utils/SecretReferenceTest.java | 131 ++++++++++++++++
15 files changed, 792 insertions(+), 38 deletions(-)
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
index 567cfa9..4fda556 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.controller.internal;
+
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -75,6 +76,8 @@ public class ArtifactResourceProvider extends
AbstractResourceProvider {
public static final String CLUSTER_NAME_PROPERTY = RESPONSE_KEY +
PropertyHelper.EXTERNAL_PATH_SEP + CLUSTER_NAME;
public static final String SERVICE_NAME_PROPERTY = RESPONSE_KEY +
PropertyHelper.EXTERNAL_PATH_SEP + SERVICE_NAME;
+ public static final String PROVISION_REQUEST_ARTIFACT_NAME =
"provision_cluster_request";
+
/**
* primary key fields
*/
@@ -525,7 +528,13 @@ public class ArtifactResourceProvider extends
AbstractResourceProvider {
private Resource toResource(ArtifactEntity entity, Set<String> requestedIds)
throws AmbariException {
Resource resource = new ResourceImpl(Resource.Type.Artifact);
setResourceProperty(resource, ARTIFACT_NAME_PROPERTY,
entity.getArtifactName(), requestedIds);
- setResourceProperty(resource, ARTIFACT_DATA_PROPERTY,
entity.getArtifactData(), requestedIds);
+ Map<String, Object> artifactData =
+ PROVISION_REQUEST_ARTIFACT_NAME.equals(entity.getArtifactName()) ?
+ // replace passwords for cluster template artifacts
+ new
ClusterTemplateArtifactPasswordReplacer().replacePasswords(entity.getArtifactData())
:
+ // return data as is for other artifacts
+ entity.getArtifactData();
+ setResourceProperty(resource, ARTIFACT_DATA_PROPERTY, artifactData,
requestedIds);
for (Map.Entry<String, String> entry : entity.getForeignKeys().entrySet())
{
TypeRegistration typeRegistration =
typeRegistrationsByShortFK.get(entry.getKey());
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index f0dd517..4454195 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -540,7 +540,7 @@ public class ClusterResourceProvider extends
AbstractControllerResourceProvider
}
try {
- return topologyManager.provisionCluster(createClusterRequest);
+ return topologyManager.provisionCluster(createClusterRequest,
rawRequestBody);
} catch (InvalidTopologyException e) {
throw new IllegalArgumentException("Topology validation failed: " + e,
e);
} catch (AmbariException e) {
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterTemplateArtifactPasswordReplacer.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterTemplateArtifactPasswordReplacer.java
new file mode 100644
index 0000000..3caeb0f
--- /dev/null
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterTemplateArtifactPasswordReplacer.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import static java.util.Collections.emptyList;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toMap;
+
+import java.util.AbstractMap;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.topology.Configurable;
+import org.apache.ambari.server.topology.Configuration;
+import org.apache.ambari.server.utils.SecretReference;
+
+import com.google.common.collect.Multimap;
+
+/**
+ * Helper class for replacing password properties in cluster template
artifacts. {@link #replacePasswords(Map)}
+ */
+public class ClusterTemplateArtifactPasswordReplacer {
+
+ /**
+ * Replaces all passwords (service config passwords and default password) in
a received cluster creation template
+ * artifact. Mpack (stack) information is used to identify password type
properties. If the cluster template does not
+ * specify mpacks, all installed stacks are queried for password type
properties.
+ *
+ * @param artifactData the raw cluster template artifact as parsed json
+ * @return the cluster template artifact with passwords replaced
+ */
+ public Map<String, Object> replacePasswords(Map<String, Object>
artifactData) {
+ Collection<StackId> stackIds =
extractStackIdsFromClusterRequest(artifactData);
+ // get all password properties from the specified stacks or
+ // all stacks if the cluster template doesn't specify mpacks
+ Multimap<String, String> passwordProperties = stackIds.isEmpty() ?
+ SecretReference.getAllPasswordProperties() :
+ SecretReference.getAllPasswordProperties(stackIds);
+ Map<String, Object> passwordsReplaced =
replacePasswordsInConfigurations(artifactData, passwordProperties);
+ passwordsReplaced.replace("default_password",
SecretReference.SECRET_PREFIX + ":default_password");
+ return passwordsReplaced;
+ }
+
+ /**
+ * Replaces passwords in the received cluster template artifact based on the
received password information extracted
+ * from stacks.
+ * @param artifactData the cluster template artifact
+ * @param passwordProperties a multimap containing password type properties.
The map has a structure of
+ * config type -> password properties.
+ * @return the cluster template artifact with passwords replaced
+ */
+ protected Map<String, Object> replacePasswordsInConfigurations(Map<String,
Object> artifactData,
+
Multimap<String, String> passwordProperties) {
+ return (Map<String, Object>)
+ applyToAllConfigurations(artifactData,
+ config -> {
+ Configuration configuration = Configurable.parseConfigs(config);
+ Configuration replacedConfiguration =
+ SecretReference.replacePasswordsInConfigurations(configuration,
passwordProperties);
+ return Configurable.convertConfigToMap(replacedConfiguration);
+ }
+ );
+ }
+
+ /**
+ * <p> Recursively scans the received data structure consisting of maps,
lists and simple values (a parsed JSON) and
applies the {@code transform} function to each configuration found. </p>
+ * <p> A value counts as configuration if it has a type of {@link
java.util.List} and is a value in a map with
+ * {@code "configurations"} key.
+ *
+ * </p>
+ * @param data the data to process recursively (is a structure of maps,
lists and simple values)
+ * @param transform the transformation to apply to configuration values
+ * @return a replication of the input data with the transformation applied
to all configuration values.
+ */
+ protected Object applyToAllConfigurations(Object data,
+ Function<List<Map<String,
Object>>, Object> transform) {
+ if (data instanceof List<?>) {
+ return processList((List<Object>)data, transform);
+ }
+ else if (data instanceof Map<?, ?>) {
+ return processMap((Map<String, Object>)data, transform);
+ }
+ else {
+ return data;
+ }
+ }
+
+ /**
+ * Recursively call {@link #applyToAllConfigurations(Object, Function)} on
all items in the list
+ * @param listItem the list to process
+ * @param transform the transformation to be passed to {@code
applyToAllConfigurations}
+ * @return the transformed list
+ */
+ protected List<Object> processList(List<Object> listItem,
Function<List<Map<String, Object>>, Object> transform) {
+ return listItem.stream().
+ map(item -> applyToAllConfigurations(item, transform)).
+ collect(toList());
+ }
+
+ /**
+ * Process map items in a cluster template artifact structure. For all
configuration type map entries,
+ * {@code transform} will be called on the value. For other entries {@link
#applyToAllConfigurations(Object, Function)}
+ * will be called recursively
+ * @param mapItem the map to process
+ * @param transform the transformation to apply on configuration type entries
+ * @return the transformed map
+ */
+ protected Map<String, Object> processMap(Map<String, Object> mapItem,
Function<List<Map<String, Object>>, Object> transform) {
+ return mapItem.entrySet().stream().map(
+ entry -> {
+ // apply transformation for configuration entries
+ if ("configurations".equals(entry.getKey()) && entry.getValue()
instanceof List<?>) {
+ return new AbstractMap.SimpleEntry<>(
+ entry.getKey(),
+ transform.apply((List<Map<String, Object>>)entry.getValue()));
+ }
+ // recursively call applyToAllConfigurations() for non-configuration
entries
+ else {
+ return new AbstractMap.SimpleEntry<>(
+ entry.getKey(),
+ applyToAllConfigurations(entry.getValue(), transform));
+ }
+ }).collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
+ }
+
+ /**
+ * Extracts mpack / stack information from the cluster template artifacts
+ * @param artifactData the cluster template artifact
+ * @return a collection of stack ids (an empty collection if the artifact
does not contain mpack references)
+ */
+ private Collection<StackId> extractStackIdsFromClusterRequest(Map<String,
Object> artifactData) {
+ List<Map<String, Object>> mpackInstances =
+ (List<Map<String, Object>>)artifactData.getOrDefault("mpack_instances",
emptyList());
+ return mpackInstances.stream().
+ map(mpackMap -> new StackId((String)mpackMap.get("name"),
(String)mpackMap.get("version"))).
+ collect(toList());
+ }
+
+
+}
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
index f939a53..4784c64 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
@@ -467,7 +467,7 @@ public class ServiceInfo implements Validable {
return properties;
}
- public void setProperties(List properties) {
+ public void setProperties(List<PropertyInfo> properties) {
this.properties = properties;
}
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java
b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java
index c20d10a..d42b966 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java
@@ -43,6 +43,32 @@ public interface Configurable {
@JsonProperty("configurations")
default void setConfigs(Collection<? extends Map<String, ?>> configs) {
+ setConfiguration(parseConfigs(configs));
+ }
+
+ /**
+ * Parses configuration maps. The configs can be in fully structured JSON,
e.g.
+ * <code>
+ * [{"hdfs-site":
+ * "properties": {
+ * "dfs.replication": "3",
+ * ...
+ * },
+ * properties_attributes: {}
+ * }]
+ * </code>
+ * or '/' separated like
+ * <code>
+ * [{
+ * "hdfs-site/properties/dfs.replication": "3",
+ * ...
+ * }]
+ * </code>
+ * In the latter case it calls {@link
ConfigurationFactory#getConfiguration(Collection)}
+ * @param configs
+ * @return
+ */
+ static Configuration parseConfigs(Collection<? extends Map<String, ?>>
configs) {
Configuration configuration;
if (!configs.isEmpty() &&
configs.iterator().next().keySet().iterator().next().contains("/")) {
// Configuration has keys with slashes like "zk.cfg/properties/dataDir"
means it is coming through
@@ -69,12 +95,15 @@ public interface Configurable {
});
configuration = new Configuration(allProperties, allAttributes);
}
- setConfiguration(configuration);
+ return configuration;
}
- @JsonProperty("configurations")
- default Collection<Map<String, Map<String, ?>>> getConfigs() {
- Configuration configuration = getConfiguration();
+ /**
+ * Converts {@link Configuration} objects to a collection easily
serializable to Json
+ * @param configuration the configuration to convert
+ * @return the resulting collection
+ */
+ static Collection<Map<String, Map<String, ?>>>
convertConfigToMap(Configuration configuration) {
Collection<Map<String, Map<String, ?>>> configurations = new ArrayList<>();
Set<String> allConfigTypes =
Sets.union(configuration.getProperties().keySet(),
configuration.getAttributes().keySet());
for (String configType: allConfigTypes) {
@@ -90,4 +119,9 @@ public interface Configurable {
return configurations;
}
+ @JsonProperty("configurations")
+ default Collection<Map<String, Map<String, ?>>> getConfigs() {
+ return convertConfigToMap(getConfiguration());
+ }
+
}
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
index 81082aa..3fd37aa 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
@@ -101,6 +101,7 @@ public class TopologyManager {
public static final String INITIAL_CONFIG_TAG = "INITIAL";
public static final String TOPOLOGY_RESOLVED_TAG = "TOPOLOGY_RESOLVED";
public static final String KDC_ADMIN_CREDENTIAL = "kdc.admin.credential";
+ public static final String RAW_REQUEST_BODY_ARTIFACT_NAME =
"raw_provision_cluster_template";
private PersistedState persistedState;
@@ -277,7 +278,8 @@ public class TopologyManager {
return
isLogicalRequestFinished(clusterProvisionWithBlueprintCreateRequests.get(clusterId));
}
- public RequestStatusResponse provisionCluster(final ProvisionClusterRequest
request) throws InvalidTopologyException, AmbariException {
+ public RequestStatusResponse provisionCluster(final ProvisionClusterRequest
request,
+ String rawRequestBody) throws
InvalidTopologyException, AmbariException {
ensureInitialized();
BlueprintBasedClusterProvisionRequest provisionRequest = new
BlueprintBasedClusterProvisionRequest(ambariContext,
securityConfigurationFactory, request.getBlueprint(), request);
@@ -301,6 +303,8 @@ public class TopologyManager {
submitKerberosDescriptorAsArtifact(clusterName,
securityConfiguration.getDescriptor());
}
+ submitRawRequestBodyAsArtifact(clusterName, rawRequestBody);
+
if (securityConfiguration.getType() == SecurityType.KERBEROS) {
Credential credential =
request.getCredentialsMap().get(KDC_ADMIN_CREDENTIAL);
submitCredential(clusterName, credential);
@@ -317,7 +321,8 @@ public class TopologyManager {
LogicalRequest logicalRequest = RetryHelper.executeWithRetry(new
Callable<LogicalRequest>() {
@Override
public LogicalRequest call() throws Exception {
- LogicalRequest logicalRequest =
processAndPersistProvisionClusterTopologyRequest(request, topology,
provisionId);
+ LogicalRequest logicalRequest =
+ processAndPersistProvisionClusterTopologyRequest(request,
topology, provisionId);
return logicalRequest;
}
}
@@ -406,27 +411,41 @@ public class TopologyManager {
}
+ private void submitRawRequestBodyAsArtifact(String clusterName, String
rawRequestBody) {
+ submitArtifact(clusterName,
ArtifactResourceProvider.PROVISION_REQUEST_ARTIFACT_NAME, rawRequestBody);
+ }
+
+
private void submitKerberosDescriptorAsArtifact(String clusterName, String
descriptor) {
+ submitArtifact(clusterName, "kerberos_descriptor", descriptor);
+ }
+ /**
+ * Submits an artifact to {@link ArtifactResourceProvider} for persistence
+ * @param clusterName the cluster name
+ * @param artifactName the artifact name (kerberos_descriptor or
provision_cluster_request)
+ * @param artifactJson the artifact as json string
+ */
+ private void submitArtifact(String clusterName, String artifactName, String
artifactJson) {
ResourceProvider artifactProvider =
-
AmbariContext.getClusterController().ensureResourceProvider(Resource.Type.Artifact);
+
AmbariContext.getClusterController().ensureResourceProvider(Resource.Type.Artifact);
Map<String, Object> properties = new HashMap<>();
- properties.put(ArtifactResourceProvider.ARTIFACT_NAME_PROPERTY,
"kerberos_descriptor");
+ properties.put(ArtifactResourceProvider.ARTIFACT_NAME_PROPERTY,
artifactName);
properties.put("Artifacts/cluster_name", clusterName);
Map<String, String> requestInfoProps = new HashMap<>();
requestInfoProps.put(org.apache.ambari.server.controller.spi.Request.REQUEST_INFO_BODY_PROPERTY,
- "{\"" + ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "\": " +
descriptor + "}");
+ "{\"" + ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "\": " +
artifactJson + "}");
org.apache.ambari.server.controller.spi.Request request = new
RequestImpl(Collections.emptySet(),
- Collections.singleton(properties), requestInfoProps, null);
+ Collections.singleton(properties), requestInfoProps, null);
try {
RequestStatus status = artifactProvider.createResources(request);
try {
while (status.getStatus() != RequestStatus.Status.Complete) {
- LOG.info("Waiting for kerberos_descriptor artifact creation.");
+ LOG.info("Waiting for {} artifact creation.", artifactName);
Thread.sleep(100);
}
} catch (InterruptedException e) {
@@ -434,16 +453,17 @@ public class TopologyManager {
}
if (status.getStatus() != RequestStatus.Status.Complete) {
- throw new RuntimeException("Failed to attach kerberos_descriptor
artifact to cluster!");
+ throw new RuntimeException("Failed to attach " + artifactName + "
artifact to cluster!");
}
} catch (SystemException | UnsupportedPropertyException |
NoSuchParentResourceException e) {
- throw new RuntimeException("Failed to attach kerberos_descriptor
artifact to cluster: " + e);
+ throw new RuntimeException("Failed to attach " + artifactName + "
artifact to cluster: " + e);
} catch (ResourceAlreadyExistsException e) {
- throw new RuntimeException("Failed to attach kerberos_descriptor
artifact to cluster as resource already exists.");
+ throw new RuntimeException("Failed to attach " + artifactName + "
artifact to cluster as resource already exists.");
}
}
+
public RequestStatusResponse scaleHosts(final ScaleClusterRequest request)
throws InvalidTopologyException, AmbariException {
@@ -550,7 +570,7 @@ public class TopologyManager {
/**
* Creates and persists a {@see PersistedTopologyRequest} and a {@see
LogicalRequest} for the provided
- * provision cluster request and topology.
+ * provision cluster request and topology. Also persists the quick links
profile if present.
* @param request Provision cluster request to create a logical request for.
* @param topology Cluster topology
* @param logicalRequestId The Id for the created logical request
@@ -560,10 +580,12 @@ public class TopologyManager {
protected LogicalRequest
processAndPersistProvisionClusterTopologyRequest(ProvisionClusterRequest
request, ClusterTopology topology, Long logicalRequestId)
throws InvalidTopologyException, AmbariException {
+ // persist quick links profile if present
if (null != request.getQuickLinksProfileJson()) {
saveOrUpdateQuickLinksProfile(request.getQuickLinksProfileJson());
}
+ // create and persist topology request
LogicalRequest logicalRequest = processAndPersistTopologyRequest(request,
topology, logicalRequestId);
return logicalRequest;
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
b/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
index dfd925d..2e4ee88 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
@@ -18,16 +18,33 @@
package org.apache.ambari.server.utils;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toMap;
+
+import java.util.AbstractMap.SimpleEntry;
+import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
+import java.util.function.Function;
import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.StackAccessException;
import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.AmbariServer;
+import org.apache.ambari.server.stack.StackDirectory;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.topology.Configuration;
+import org.apache.commons.lang3.StringUtils;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Multimap;
+import com.google.common.collect.SetMultimap;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.google.inject.Inject;
@@ -35,7 +52,7 @@ import com.google.inject.Inject;
@StaticallyInject
public class SecretReference {
- private static final String secretPrefix = "SECRET";
+ public static final String SECRET_PREFIX = "SECRET";
private String configType;
private Long version;
private String value;
@@ -81,11 +98,11 @@ public class SecretReference {
public static boolean isSecret(String value) {
String[] values = value.split(":");
- return values.length == 4 && values[0].equals(secretPrefix);
+ return values.length == 4 && values[0].equals(SECRET_PREFIX);
}
public static String generateStub(String configType, Long configVersion,
String propertyName) {
- return secretPrefix + ":" + configType + ":" + configVersion + ":" +
propertyName;
+ return SECRET_PREFIX + ":" + configType + ":" + configVersion + ":" +
propertyName;
}
/**
@@ -100,7 +117,7 @@ public class SecretReference {
for (Map.Entry<String, String> e : map.entrySet()) {
String value = e.getValue();
if (e.getKey().toLowerCase().contains(PASSWORD_TEXT) ||
e.getKey().toLowerCase().contains(PASSWD_TEXT)) {
- value = secretPrefix;
+ value = SECRET_PREFIX;
}
maskedMap.put(e.getKey(), value);
}
@@ -151,6 +168,51 @@ public class SecretReference {
}
/**
+ * Returns all password properties defined in the stacks specified by the
given stack id's. Keys in the map are
file names (e.g. hadoop-env.xml) and values are property names.
+ * @param stackIds the stack ids to specify which stacks to look for
+ * @return A set multimap of password type properties.
+ * @throws IllegalArgumentException when a non-existing stack is specified
+ */
+ public static SetMultimap<String, String>
getAllPasswordProperties(Collection<StackId> stackIds) throws
IllegalArgumentException {
+ AmbariMetaInfo metaInfo = AmbariServer.getController().getAmbariMetaInfo();
+ Collection<StackInfo> stacks = stackIds.stream().map(
+ stackId -> {
+ try {
+ return metaInfo.getStack(stackId);
+ }
+ catch (StackAccessException ex) {
+ throw new IllegalArgumentException(ex);
+ }
+ }
+ ).collect(toList());
+ return getAllPasswordPropertiesInternal(stacks);
+ }
+
+ /**
+ * Returns all password properties defined in all stacks. Keys in the map are
+ * file names (e.g. hadoop-env.xml) and values are property names.
+ * @return A set multimap of password type properties.
+ */
+ public static SetMultimap<String, String> getAllPasswordProperties() {
+ AmbariMetaInfo metaInfo = AmbariServer.getController().getAmbariMetaInfo();
+ return getAllPasswordPropertiesInternal(metaInfo.getStacks());
+ }
+
+ static SetMultimap<String, String>
getAllPasswordPropertiesInternal(Collection<StackInfo> stacks) {
+ SetMultimap<String, String> passwordPropertyMap = HashMultimap.create();
+ stacks.stream().
+ flatMap(stack -> stack.getServices().stream()).
+ flatMap(serviceInfo -> serviceInfo.getProperties().stream()).
+ filter(propertyInfo ->
propertyInfo.getPropertyTypes().contains(PropertyInfo.PropertyType.PASSWORD)).
+ forEach(propertyInfo -> passwordPropertyMap.put(
+ StringUtils.removeEnd(propertyInfo.getFilename(),
StackDirectory.SERVICE_CONFIG_FILE_NAME_POSTFIX),
+ propertyInfo.getName())
+ );
+ return passwordPropertyMap;
+ }
+
+ /**
* Replace real passwords with secret references
* @param configAttributes map with config attributes containing properties
types as part of their content
* @param propertiesMap map with properties in which replacement will be
performed
@@ -172,4 +234,76 @@ public class SecretReference {
}
}
}
+
+ /**
+ * Replaces all password type properties in the given {@link Configuration}
object. Creates a new Configuration
+ * object instead of mutating the input configuration.
+ * @param configuration the input configuration
+ * @param passwordProperties password type properties in a multimap.
+ * It has {@code config-type -> [password-prop-1,
password-prop-2, ...]} structure.
+ * @return a new configuration with password properties replaced
+ */
+ public static Configuration replacePasswordsInConfigurations(Configuration
configuration,
+
Multimap<String, String> passwordProperties) {
+ // replace passwords in config properties
+ Map<String, Map<String, String>> replacedProperties = replaceInInnerMap(
+ configuration.getProperties(),
+ // in this case the map has {"configType": {"property": "value"}}
structure
+ entry -> entry.getKey(),
+ passwordProperties);
+
+ // replace passwords in config attributes
+ Map<String, Map<String, Map<String, String>>> replacedAttributes =
configuration.getAttributes().entrySet().stream().map(
+ configTypeEntry -> {
+ Map<String, Map<String, String>> replacedConfigProps =
replaceInInnerMap(
+ configTypeEntry.getValue(),
+ // in this case the map has {"attributeType": {"property": "value"}}
structure, the config type comes from the outer map
+ entry -> configTypeEntry.getKey(),
+ passwordProperties);
+ return new SimpleEntry<>(configTypeEntry.getKey(),
replacedConfigProps);
+ }
+ ).collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
+
+ return new Configuration(replacedProperties, replacedAttributes);
+ }
+
+ /**
+ * @param input the map that contains a property map
+ * @param configType a function that calculates the config type based on the
actual map entry processed
+ * during transformation
+ * @param passwordProperties password type properties in a multimap.
+ * It has {@code config-type -> [password-prop-1,
password-prop-2, ...]} structure.
+ * @return a new map in which the inner property map has its password
properties replaced
+ */
+ private static Map<String, Map<String, String>>
replaceInInnerMap(Map<String, Map<String, String>> input,
+
Function<Map.Entry<String, Map<String, String>>, String> configType,
+
Multimap<String, String> passwordProperties) {
+ return input.entrySet().stream().collect(toMap(
+ Map.Entry::getKey,
+ entry -> replacePasswordsInPropertyMap(entry.getValue(),
configType.apply(entry), passwordProperties)
+ ));
+ }
+
+ /**
+ * Replaces all password type properties in the given property map. Creates
a new map instead of mutating
+ * the input configuration.
+ * @param propertyMap the input property map
+ * @param passwordProperties password type properties in a multimap.
+ * It has {@code config-type -> [password-prop-1,
password-prop-2, ...]} structure.
+ * @return a new property map with password properties replaced
+ */
+ public static Map<String, String> replacePasswordsInPropertyMap(Map<String,
String> propertyMap,
+ String
configType,
+
Multimap<String, String> passwordProperties) {
+ return propertyMap.entrySet().stream().map(
+ entry -> {
+ String propertyType = entry.getKey();
+ String newValue =
passwordProperties.get(configType).contains(propertyType) ?
+ SECRET_PREFIX + ":" + configType + ":" + propertyType :
+ entry.getValue();
+ return new SimpleEntry<>(propertyType, newValue);
+ }
+ ).collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
+ }
+
}
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
index 2a0fdaa..afd8e4a 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.controller.internal;
+import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
@@ -56,17 +57,27 @@ import org.apache.ambari.server.orm.dao.ArtifactDAO;
import org.apache.ambari.server.orm.entities.ArtifactEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.utils.SecretReference;
import org.easymock.Capture;
import org.easymock.IAnswer;
import org.junit.Before;
import org.junit.Test;
-
+import org.junit.runner.RunWith;
+import org.powermock.api.easymock.PowerMock;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSetMultimap;
+import com.google.common.collect.ImmutableSortedMap;
import com.google.gson.Gson;
/**
* ArtifactResourceProvider unit tests.
*/
@SuppressWarnings("unchecked")
+@RunWith(PowerMockRunner.class)
public class ArtifactResourceProviderTest {
private ArtifactDAO dao = createStrictMock(ArtifactDAO.class);
@@ -150,6 +161,89 @@ public class ArtifactResourceProviderTest {
assertEquals("child4Value",
resource.getPropertyValue("artifact_data/child/child2/child3/child4/child4Key"));
}
  /**
   * Test to prove that passwords are replaced with references in case the retrieved artifact is of type
   * {@link ArtifactResourceProvider#PROVISION_REQUEST_ARTIFACT_NAME}
   */
  @Test
  @PrepareForTest(SecretReference.class)
  public void testGetResources_clusterTemplatePasswordsAreReplaced() throws Exception {
    // given
    // The artifact is looked up by artifact name + foreign keys (the owning cluster's id).
    TreeMap<String, String> foreignKeys = new TreeMap<>(ImmutableSortedMap.of("cluster", "500"));
    TreeMap<String, String> responseForeignKeys = new TreeMap<>(foreignKeys);

    // Raw stored cluster template data: one password property and one non-password property in
    // ranger-yarn-audit, plus a default_password and an mpack instance list.
    Map<String, Object> artifactData = ImmutableMap.of(
      "configurations",
      ImmutableList.of(
        ImmutableMap.of("ranger-yarn-audit",
          ImmutableMap.of("properties",
            ImmutableMap.of(
              "xasecure.policymgr.clientssl.keystore.password", "You'llNeverGuess",
              "xasecure.policymgr.clientssl.keystore.credential.file", "jceks:/dev/null")))),
      "default_password", "TopSecurePassword",
      "mpack_instances",
      ImmutableList.of(
        ImmutableMap.of(
          "name", "HDPCORE",
          "version", "1.0.0")));

    // expectations
    expect(controller.getClusters()).andReturn(clusters).anyTimes();
    expect(clusters.getCluster("test-cluster")).andReturn(cluster).anyTimes();
    expect(clusters.getClusterById(500L)).andReturn(cluster).anyTimes();
    expect(cluster.getClusterId()).andReturn(500L).anyTimes();
    expect(cluster.getClusterName()).andReturn("test-cluster").anyTimes();

    expect(request.getPropertyIds()).andReturn(new HashSet<>()).anyTimes();

    expect(dao.findByNameAndForeignKeys(eq(ArtifactResourceProvider.PROVISION_REQUEST_ARTIFACT_NAME),
      eq(foreignKeys))).andReturn(entity).once();
    expect(entity.getArtifactName()).andReturn(ArtifactResourceProvider.PROVISION_REQUEST_ARTIFACT_NAME).anyTimes();
    expect(entity.getForeignKeys()).andReturn(responseForeignKeys).anyTimes();
    expect(entity.getArtifactData()).andReturn(artifactData).anyTimes();

    // Only the static password-property lookup on SecretReference is mocked (declares just the
    // keystore password as a password property); the replacement logic itself runs for real.
    PowerMock.mockStaticPartial(SecretReference.class, "getAllPasswordProperties");
    expect(SecretReference.getAllPasswordProperties(anyObject(Collection.class))).andReturn(
      ImmutableSetMultimap.of("ranger-yarn-audit", "xasecure.policymgr.clientssl.keystore.password")
    );

    // end of expectation setting
    replay(dao, em, controller, request, clusters, cluster, entity, entity2);
    PowerMock.replay(SecretReference.class);

    PredicateBuilder pb = new PredicateBuilder();
    Predicate predicate = pb.begin().property("Artifacts/cluster_name").equals("test-cluster").and().
      property("Artifacts/artifact_name").equals(ArtifactResourceProvider.PROVISION_REQUEST_ARTIFACT_NAME).end().
      toPredicate();

    // when
    Set<Resource> response = resourceProvider.getResources(request, predicate);

    // then
    assertEquals(1, response.size());
    Resource resource = response.iterator().next();
    Map<String, Map<String, Object>> responseProperties = resource.getPropertiesMap();
    Map<String, Object> artifactDataMap = responseProperties.get("artifact_data");

    // Expected result: the declared password property and default_password are replaced with
    // SECRET references; everything else (credential file path, mpack instances) is untouched.
    Map<String, Object> expectedArtifactResultData = ImmutableMap.of(
      "configurations",
      ImmutableList.of(
        ImmutableMap.of("ranger-yarn-audit",
          ImmutableMap.of("properties",
            ImmutableMap.of(
              "xasecure.policymgr.clientssl.keystore.password",
                "SECRET:ranger-yarn-audit:xasecure.policymgr.clientssl.keystore.password",
              "xasecure.policymgr.clientssl.keystore.credential.file", "jceks:/dev/null")))),
      "default_password", "SECRET:default_password",
      "mpack_instances",
      ImmutableList.of(
        ImmutableMap.of(
          "name", "HDPCORE",
          "version", "1.0.0")));

    assertEquals(expectedArtifactResultData, artifactDataMap);
  }

@Test
public void testGetResources_collection() throws Exception {
Set<String> propertyIds = new HashSet<>();
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
index ba358f3..adc0503 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
@@ -154,7 +154,7 @@ public class ClusterResourceProviderTest {
expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.anyObject(),
anyBoolean())).andReturn(null)
.once();
expect(topologyFactory.createProvisionClusterRequest(properties,
null)).andReturn(topologyRequest).once();
-
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
+ expect(topologyManager.provisionCluster(eq(topologyRequest),
anyObject())).andReturn(requestStatusResponse).once();
expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
replayAll();
@@ -186,7 +186,7 @@ public class ClusterResourceProviderTest {
expect(topologyFactory.createProvisionClusterRequest(properties,
securityConfiguration)).andReturn(topologyRequest).once();
expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.anyObject(),
anyBoolean())).andReturn
(securityConfiguration).once();
-
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
+ expect(topologyManager.provisionCluster(eq(topologyRequest),
anyObject())).andReturn(requestStatusResponse).once();
expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
replayAll();
@@ -472,7 +472,7 @@ public class ClusterResourceProviderTest {
expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.anyObject(),
anyBoolean())).andReturn(null)
.once();
expect(topologyFactory.createProvisionClusterRequest(eq(properties),
anyObject())).andReturn(topologyRequest).once();
-
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
+ expect(topologyManager.provisionCluster(eq(topologyRequest),
anyObject())).andReturn(requestStatusResponse).once();
expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
replayAll();
@@ -803,7 +803,7 @@ public class ClusterResourceProviderTest {
expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.anyObject(),
anyBoolean())).andReturn(null)
.once();
expect(topologyFactory.createProvisionClusterRequest(properties,
null)).andReturn(topologyRequest).once();
-
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
+ expect(topologyManager.provisionCluster(eq(topologyRequest),
anyObject())).andReturn(requestStatusResponse).once();
expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
replayAll();
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterTemplateArtifactPasswordReplacerTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterTemplateArtifactPasswordReplacerTest.java
new file mode 100644
index 0000000..1547f8f
--- /dev/null
+++
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterTemplateArtifactPasswordReplacerTest.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+
+import static java.util.stream.Collectors.toList;
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Test;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+
+public class ClusterTemplateArtifactPasswordReplacerTest {
+
+ private static final List<Map<String, Object>> CONFIGURATION_1 =
+ createConfiguration("hdfs-site", map("property1", "value1"));
+
+ private static final List<Map<String, Object>> CONFIGURATION_2 =
+ createConfiguration("yarn-site", map("property2", "value2"));
+
+ private static final List<Map<String, Object>> CONFIGURATION_3 =
+ createConfiguration("hbase-site", map("property3", "value3"));
+
+ private static final List<Map<String, Object>> CONFIGURATION_1_REPLACED =
+ createConfiguration("hdfs-site", map("property1", "value1.replaced"));
+
+ private static final List<Map<String, Object>> CONFIGURATION_2_REPLACED =
+ createConfiguration("yarn-site", map("property2", "value2.replaced"));
+
+ private static final List<Map<String, Object>> CONFIGURATION_3_REPLACED =
+ createConfiguration("hbase-site", map("property3", "value3.replaced"));
+
+
+ private static final Map<String, Object> ORIGINAL_CLUSTER_TEMPLATE =
+ createClusterTemplateArtifact(CONFIGURATION_1, CONFIGURATION_2,
CONFIGURATION_3);
+
+ private static final Map<String, Object> EXPECTED_PROCESSED_CLUSTER_TEMPLATE
=
+ createClusterTemplateArtifact(CONFIGURATION_1_REPLACED,
CONFIGURATION_2_REPLACED, CONFIGURATION_3_REPLACED);
+
+ /**
+ * Test to prove that all configurations are replaced within a cluster
template structure no matter where
+ * they are defined.
+ * @throws Exception
+ */
+ @Test
+ public void testApplyToAllConfigurations() throws Exception {
+ // given
+ ClusterTemplateArtifactPasswordReplacer replacer = new
ClusterTemplateArtifactPasswordReplacer();
+
+ // when
+ Object clusterTemplateWithReplacedConfigs =
+ replacer.applyToAllConfigurations(
+ ORIGINAL_CLUSTER_TEMPLATE,
+ config -> replaceValues(config));
+
+ // then
+ assertEquals(EXPECTED_PROCESSED_CLUSTER_TEMPLATE,
clusterTemplateWithReplacedConfigs);
+ }
+
+ /**
+ * Creates a cluster template artifact with configuration objects at
different levels
+ * @param configuration1 mpack instance level configuration
+ * @param configuration2 service instance level configuration
+ * @param configuration3 cluster template level configuration
+ * @return
+ */
+ private static Map<String, Object> createClusterTemplateArtifact(Object
configuration1, Object configuration2, Object configuration3) {
+ return
+ map(
+ "mpack_instances",
+ list(
+ map(
+ "name", "HDPCORE",
+ "version", "1.0.0.0",
+ "configurations", configuration1,
+ "service_instances", list(
+ map(
+ "name", "ZK1",
+ "type", "ZOOKEEPER",
+ "configurations", configuration2
+ )
+ )
+ )
+ ),
+ "host_groups",
+ list(
+ map(
+ "name", "hostgroup1",
+ "host_count", "1"
+ )
+ ),
+ "configurations", configuration3
+ );
+ }
+
+ /**
+ *
+ * @param configType the config type, e.g. hdfs-site
+ * @param properties the properties
+ * @return a simple configuration in <code>[{"configType": {"properties":
{"key1": "value1", ...}}}]</code> format.
+ */
+ private static List<Map<String, Object>> createConfiguration(String
configType, Map<String, String> properties) {
+ return list(map(configType, map("properties", properties)));
+ }
+
+ /**
+ * Replaces property values by appending a ".replaced" suffix to each value.
E.g:
+ * <code>[ {"hdfs-site": {"properties": {"property": "value", ...}}}</code>
+ * will become
+ * <code>[ {"hdfs-site": {"properties": {"property": "value.replaced",
...}}}</code>
+ * @param configuration
+ * @return
+ */
+ private static List<Map<String, Object>> replaceValues(List<Map<String,
Object>> configuration) {
+ return configuration.stream().map(
+ config -> {
+ String configType = config.keySet().iterator().next();
+ Map<String, String> properties =
+ ((Map<String, Map<String,
String>>)config.values().iterator().next()).values().iterator().next();
+ ImmutableMap.Builder<String, Object> replacedProperties =
ImmutableMap.builder();
+ properties.entrySet().stream().forEach(
+ e -> replacedProperties.put(e.getKey(), e.getValue() + ".replaced"));
+ return ImmutableMap.<String, Object>of(configType, map("properties",
replacedProperties.build()));
+ }
+ ).collect(toList());
+ }
+
+ // ---- Convenience methods to create lists and maps
+
+ private static <T> List<T> list(T... elements) {
+ return ImmutableList.copyOf(elements);
+ }
+
+ private static <K, V> Map<K, V> map(K k1, V v1) {
+ return ImmutableMap.of(k1, v1);
+ }
+
+ private static <K, V> Map<K, V> map(K k1, V v1, K k2, V v2) {
+ return ImmutableMap.of(k1, v1, k2, v2);
+ }
+
+ private static <K, V> Map<K, V> map(K k1, V v1, K k2, V v2, K k3, V v3) {
+ return ImmutableMap.of(k1, v1, k2, v2, k3, v3);
+ }
+
+ private static <K, V> Map<K, V> map(K k1, V v1, K k2, V v2, K k3, V v3, K
k4, V v4) {
+ return ImmutableMap.of(k1, v1, k2, v2, k3, v3, k4, v4);
+ }
+
+}
\ No newline at end of file
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
index dab9062..8cce356 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
@@ -446,7 +446,7 @@ public class ClusterDeployWithStartOnlyTest extends
EasyMockSupport {
@Test
public void testProvisionCluster() throws Exception {
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
LogicalRequest request = topologyManager.getRequest(1);
assertEquals(request.getHostRequests().size(), 3);
}
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
index 13d5b17..9c54ff5 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
@@ -425,6 +425,6 @@ public class ClusterInstallWithoutStartOnComponentLevelTest
extends EasyMockSupp
@Test
public void testProvisionCluster() throws Exception {
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
}
}
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
index d0841ab..52e83b4 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
@@ -425,6 +425,6 @@ public class ClusterInstallWithoutStartTest extends
EasyMockSupport {
@Test
public void testProvisionCluster() throws Exception {
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
}
}
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
index 9f3a1f0..e60123e 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
@@ -415,7 +415,7 @@ public class TopologyManagerTest {
expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).anyTimes();
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
//todo: assertions
}
@@ -439,7 +439,7 @@ public class TopologyManagerTest {
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
//todo: assertions
}
@@ -462,7 +462,7 @@ public class TopologyManagerTest {
expect(logicalRequest.isFinished()).andReturn(true).anyTimes();
expect(logicalRequest.isSuccessful()).andReturn(true).anyTimes();
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
requestFinished();
Assert.assertTrue(topologyManager.isClusterProvisionWithBlueprintFinished(CLUSTER_ID));
}
@@ -486,7 +486,7 @@ public class TopologyManagerTest {
expect(logicalRequest.isFinished()).andReturn(true).anyTimes();
expect(logicalRequest.isSuccessful()).andReturn(false).anyTimes();
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
requestFinished();
Assert.assertTrue(topologyManager.isClusterProvisionWithBlueprintFinished(CLUSTER_ID));
}
@@ -509,7 +509,7 @@ public class TopologyManagerTest {
expect(persistedState.getProvisionRequest(CLUSTER_ID)).andReturn(logicalRequest).anyTimes();
expect(logicalRequest.isFinished()).andReturn(false).anyTimes();
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
requestFinished();
Assert.assertFalse(topologyManager.isClusterProvisionWithBlueprintFinished(CLUSTER_ID));
}
@@ -583,7 +583,7 @@ public class TopologyManagerTest {
replay(bpfMock);
expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).anyTimes();
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
topologyManager.scaleHosts(new ScaleClusterRequest(propertySet));
Assert.fail("InvalidTopologyException should have been thrown");
}
@@ -608,7 +608,7 @@ public class TopologyManagerTest {
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
}
@Test
@@ -633,7 +633,7 @@ public class TopologyManagerTest {
replayAll();
- topologyManager.provisionCluster(request);
+ topologyManager.provisionCluster(request, "{}");
}
private SettingEntity createQuickLinksSettingEntity(String content, long
timeStamp) {
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/utils/SecretReferenceTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/utils/SecretReferenceTest.java
new file mode 100644
index 0000000..004bf55
--- /dev/null
+++
b/ambari-server/src/test/java/org/apache/ambari/server/utils/SecretReferenceTest.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.utils;
+
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toMap;
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.topology.Configuration;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableSetMultimap;
+import com.google.common.collect.Multimap;
+
/**
 * Unit tests for the password-replacement helpers of {@code SecretReference}: collecting password
 * type properties from stack definitions and replacing their values with SECRET references in a
 * {@code Configuration}.
 */
public class SecretReferenceTest {

  // NOTE(review): this constant already ends in ".xml" even though its name suggests the bare
  // config type, so RANGER_HDFS_POLICYMGR_SSL_XML below ends in ".xml.xml". The test is internally
  // consistent (the expected multimap key and the SECRET references use the same value), but
  // confirm the double suffix is intentional and matches how SecretReference derives config types
  // from property filenames.
  public static final String RANGER_HDFS_POLICYMGR_SSL = "ranger-hdfs-policymgr-ssl.xml";
  public static final String RANGER_HDFS_POLICYMGR_SSL_XML = RANGER_HDFS_POLICYMGR_SSL + ".xml";

  // The two properties marked with PropertyType.PASSWORD in the test stack.
  public static final Set<String> PASSWORD_PROPERTIES = ImmutableSet.of(
    "xasecure.policymgr.clientssl.keystore.password",
    "xasecure.policymgr.clientssl.truststore.password"
  );

  // All properties of the test config type: two passwords and two ordinary properties.
  public static final List<String> PROPERTY_NAMES = ImmutableList.of(
    "xasecure.policymgr.clientssl.keystore.password",
    "xasecure.policymgr.clientssl.truststore.password",
    "xasecure.policymgr.clientssl.keystore.credential.file",
    "xasecure.policymgr.clientssl.truststore.credential.file"
  );

  // Expected config-type -> password-property-names multimap for the stack built in setup().
  public static final Multimap<String, String> EXPECTED_PASSWORD_PROPERTY_MAP =
    ImmutableSetMultimap.<String, String>builder().
      putAll(RANGER_HDFS_POLICYMGR_SSL, PASSWORD_PROPERTIES).
      build();

  private StackInfo hdpCore;

  // Builds a minimal stack with a single service that declares the four properties above.
  @Before
  public void setup() throws Exception {
    hdpCore = new StackInfo();
    ServiceInfo hdfs = new ServiceInfo();
    hdfs.setProperties(createProperties());
    hdpCore.setServices(ImmutableList.of(hdfs));
  }


  // Creates one PropertyInfo per name in PROPERTY_NAMES; only the entries listed in
  // PASSWORD_PROPERTIES get the PASSWORD property type.
  private List<PropertyInfo> createProperties() {
    return PROPERTY_NAMES.stream().map(propertyName -> {
      PropertyInfo propertyInfo = new PropertyInfo();
      propertyInfo.setFilename(RANGER_HDFS_POLICYMGR_SSL_XML);
      propertyInfo.setName(propertyName);
      if (PASSWORD_PROPERTIES.contains(propertyName)) {
        propertyInfo.setPropertyTypes(ImmutableSet.of(PropertyInfo.PropertyType.PASSWORD));
      }
      return propertyInfo;
    }).collect(toList());
  }

  // Verifies that only the PASSWORD-typed properties are collected, keyed by config type.
  @Test
  public void testGetAllPasswordPropertiesInternal() {
    assertEquals(EXPECTED_PASSWORD_PROPERTY_MAP,
      SecretReference.getAllPasswordPropertiesInternal(ImmutableList.of(hdpCore)));
  }


  // Verifies that password values are replaced with SECRET references both in the configuration's
  // properties and in its attributes, while non-password values are left untouched.
  @Test
  public void testReplacePasswordsInConfiguration() {
    // given
    Map<String, String> propertyMap =
      PROPERTY_NAMES.stream().collect(toMap(Function.identity(), propertyName -> "someValue"));

    Map<String, Map<String, String>> properties =
      ImmutableMap.of(RANGER_HDFS_POLICYMGR_SSL, propertyMap);

    // The same property map is reused as the value of the "final" attribute, so the attribute
    // values are expected to be replaced the same way as the properties.
    Map<String, Map<String, Map<String, String>>> attributes =
      ImmutableMap.of(RANGER_HDFS_POLICYMGR_SSL, ImmutableMap.of("final", propertyMap));

    Configuration config = new Configuration(properties, attributes);

    // when
    Configuration replacedConfig =
      SecretReference.replacePasswordsInConfigurations(config, EXPECTED_PASSWORD_PROPERTY_MAP);

    // then
    Map<String, String> replacedPropertyMap =
      PROPERTY_NAMES.stream().collect(toMap(
        Function.identity(),
        propertyName -> PASSWORD_PROPERTIES.contains(propertyName) ?
          secret(propertyName) :
          "someValue")
      );

    assertEquals(replacedPropertyMap, replacedConfig.getProperties().entrySet().iterator().next().getValue());
    assertEquals(replacedPropertyMap,
      replacedConfig.getAttributes().entrySet().iterator().next().getValue().entrySet().iterator().next().getValue());
  }

  // Expected SECRET reference for a password property of the test config type.
  private static String secret(String propertyName) {
    return "SECRET:" + RANGER_HDFS_POLICYMGR_SSL + ":" + propertyName;
  }

}
\ No newline at end of file
--
To stop receiving notification emails like this one, please contact
[email protected].