This is an automated email from the ASF dual-hosted git repository.

gezapeti pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/oozie.git


The following commit(s) were added to refs/heads/master by this push:
     new 6e10bad  OOZIE-3575 Add credential support for cloud file systems (matijhs via gezapeti)
6e10bad is described below

commit 6e10bad6adb1c60707956e720f99bf2fc7922cd7
Author: Gezapeti Cseh <gezap...@apache.org>
AuthorDate: Wed Jan 15 15:49:16 2020 +0100

    OOZIE-3575 Add credential support for cloud file systems (matijhs via gezapeti)
---
 .../action/hadoop/CredentialsProviderFactory.java  |  14 +--
 .../oozie/action/hadoop/DistcpActionExecutor.java  |  58 +++++------
 .../oozie/action/hadoop/FileSystemCredentials.java |  90 +++++++++++++++++
 .../oozie/action/hadoop/JavaActionExecutor.java    | 100 ++++++++++---------
 .../oozie/action/hadoop/TestCredentials.java       | 109 +++++++++++++++++++--
 docs/src/site/markdown/DG_ActionAuthentication.md  |   7 +-
 release-log.txt                                    |   1 +
 7 files changed, 285 insertions(+), 94 deletions(-)

diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java
index 8c1bd40..4b576b6 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java
@@ -18,10 +18,6 @@
 
 package org.apache.oozie.action.hadoop;
 
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -30,10 +26,16 @@ import org.apache.oozie.service.ConfigurationService;
 import org.apache.oozie.util.StringUtils;
 import org.apache.oozie.util.XLog;
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
 public class CredentialsProviderFactory {
     public static final String CRED_KEY = "oozie.credentials.credentialclasses";
     private static final XLog LOG = XLog.getLog(CredentialsProviderFactory.class);
-    public static final String HDFS = "hdfs";
+    public static final String FS = "filesystem";
+    public static final String NAMENODE_FS = "namenode_fs";
+    public static final String WORKFLOW_APP_FS = "workflow_app_fs";
     public static final String YARN = "yarnRM";
     public static final String JHS = "jhs";
     private static CredentialsProviderFactory instance;
@@ -76,7 +78,7 @@ public class CredentialsProviderFactory {
                 }
             }
         }
-        providerCache.put(HDFS, HDFSCredentials.class);
+        providerCache.put(FS, FileSystemCredentials.class);
         providerCache.put(YARN, YarnRMCredentials.class);
         providerCache.put(JHS, JHSCredentials.class);
     }
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
index a3daaa7..7b925af 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
@@ -22,25 +22,27 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.oozie.action.ActionExecutorException;
-import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.util.StringUtils;
 import org.apache.oozie.util.XLog;
 import org.jdom.Element;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
-public class DistcpActionExecutor extends JavaActionExecutor{
-    public static final String CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS = "org.apache.oozie.action.hadoop.DistcpMain";
+import static org.apache.oozie.action.hadoop.CredentialsProviderFactory.FS;
+import static org.apache.oozie.action.hadoop.CredentialsProviderFactory.NAMENODE_FS;
+import static org.apache.oozie.action.hadoop.FileSystemCredentials.FILESYSTEM_PATH;
+
+public class DistcpActionExecutor extends JavaActionExecutor {
+    private static final String CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS = "org.apache.oozie.action.hadoop.DistcpMain";
     private static final String DISTCP_MAIN_CLASS_NAME = "org.apache.hadoop.tools.DistCp";
     public static final String CLASS_NAMES = "oozie.actions.main.classnames";
     private static final XLog LOG = XLog.getLog(DistcpActionExecutor.class);
-    public static final String DISTCP_TYPE = "distcp";
 
     /**
      * Comma separated list of NameNode hosts to obtain delegation token(s) for.
      */
-    private static final String OOZIE_LAUNCHER_MAPREDUCE_JOB_HDFS_SERVERS = "oozie.launcher.mapreduce.job.hdfs-servers";
+    static final String OOZIE_LAUNCHER_MAPREDUCE_JOB_HDFS_SERVERS = "oozie.launcher.mapreduce.job.hdfs-servers";
 
     /**
      * Comma separated list to instruct ResourceManagers on either cluster to skip delegation token renewal for NameNode hosts.
@@ -67,35 +69,13 @@ public class DistcpActionExecutor extends JavaActionExecutor{
         List<Class<?>> classes = new ArrayList<>();
         try {
             classes.add(Class.forName(CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS));
-        }
-        catch (ClassNotFoundException e) {
+        } catch (ClassNotFoundException e) {
             throw new RuntimeException("Class not found", e);
         }
         return classes;
     }
 
     /**
-     * This function returns the Action classes names from the configuration
-     *
-     * @param type This is type of the action classes
-     * @return Name of the class from the configuration
-     */
-    public static String getClassNamebyType(String type){
-        String classname = null;
-        for (String function : ConfigurationService.getStrings(CLASS_NAMES)) {
-            function = StringUtils.trim(function);
-            LOG.debug("class for Distcp Action: " + function);
-            String[] str = function.split("=");
-            if (str.length > 0) {
-                if(type.equalsIgnoreCase(str[0])){
-                    classname = new String(str[1]);
-                }
-            }
-        }
-        return classname;
-    }
-
-    /**
      * Return the sharelib name for the action.
      *
      * @return returns <code>distcp</code>.
@@ -114,17 +94,25 @@ public class DistcpActionExecutor extends JavaActionExecutor{
     /**
      * Extracts information required for DistCp action between secure clusters (in the same or distinct Kerberos realms)
      *
-     * @param jobconf workflow action configuration
+     * @param actionConf workflow action configuration
+     * @param credPropertiesMap Map of defined workflow credentials
      */
     @Override
-    protected void setActionTokenProperties(final Configuration jobconf) {
-        final String hdfsServers = jobconf.get(OOZIE_LAUNCHER_MAPREDUCE_JOB_HDFS_SERVERS);
+    void addNameNodeCredentials(Configuration actionConf, Map<String, CredentialsProperties> credPropertiesMap) {
+        String hdfsServers = actionConf.get(OOZIE_LAUNCHER_MAPREDUCE_JOB_HDFS_SERVERS);
         if (hdfsServers != null) {
-            jobconf.set(MRJobConfig.JOB_NAMENODES, hdfsServers);
-            final String tokenRenewalExclude = jobconf.get(OOZIE_LAUNCHER_MAPREDUCE_JOB_HDFS_SERVERS_TOKEN_RENEWAL_EXCLUDE);
+            LOG.info("Overriding {0} default value from action config with {1}",
+                    MRJobConfig.JOB_NAMENODES, hdfsServers);
+            actionConf.set(MRJobConfig.JOB_NAMENODES, hdfsServers);
+            final String tokenRenewalExclude = actionConf.get(OOZIE_LAUNCHER_MAPREDUCE_JOB_HDFS_SERVERS_TOKEN_RENEWAL_EXCLUDE);
             if (tokenRenewalExclude != null) {
-                jobconf.set(JOB_NAMENODES_TOKEN_RENEWAL_EXCLUDE, tokenRenewalExclude);
+                actionConf.set(JOB_NAMENODES_TOKEN_RENEWAL_EXCLUDE, tokenRenewalExclude);
             }
+            CredentialsProperties fsCredentialProperties = new CredentialsProperties(NAMENODE_FS, FS);
+            fsCredentialProperties.getProperties().put(FILESYSTEM_PATH, hdfsServers);
+            credPropertiesMap.put(NAMENODE_FS, fsCredentialProperties);
+        } else {
+            super.addNameNodeCredentials(actionConf, credPropertiesMap);
         }
     }
 }
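
For context, a cross-cluster DistCp action supplies the NameNode list through the launcher property this executor now reads. A minimal sketch of such an action under workflow schema 1.0 — host names, paths, and the action name are illustrative only, not part of this commit:

    <action name="distcp-copy">
        <distcp xmlns="uri:oozie:distcp-action:1.0">
            <resource-manager>${resourceManager}</resource-manager>
            <name-node>${nameNode}</name-node>
            <configuration>
                <!-- Comma separated list of NameNode hosts to obtain delegation tokens for -->
                <property>
                    <name>oozie.launcher.mapreduce.job.hdfs-servers</name>
                    <value>hdfs://namenode1,hdfs://namenode2</value>
                </property>
            </configuration>
            <arg>${sourcePath}</arg>
            <arg>${targetPath}</arg>
        </distcp>
        <ok to="end"/>
        <error to="fail"/>
    </action>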
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/FileSystemCredentials.java b/core/src/main/java/org/apache/oozie/action/hadoop/FileSystemCredentials.java
new file mode 100644
index 0000000..d1164c8
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/FileSystemCredentials.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.action.hadoop;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.security.TokenCache;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.oozie.ErrorCode;
+import org.apache.oozie.action.ActionExecutor;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.UserGroupInformationService;
+import org.apache.oozie.util.XLog;
+
+import java.security.PrivilegedExceptionAction;
+import java.util.Collection;
+
+
+public class FileSystemCredentials implements CredentialsProvider {
+    protected XLog LOG = XLog.getLog(getClass());
+
+    static final String FILESYSTEM_PATH = "filesystem.path";
+
+    /**
+     * Add an HDFS_DELEGATION_TOKEN to the {@link Credentials} provided.
+     * This is also important to ensure that log aggregation works correctly from the NM.
+     *
+     * @param credentials the credentials object which is updated
+     * @param config      launcher AM configuration
+     * @param props       properties for getting credential token or certificate
+     * @param context     workflow context
+     * @throws Exception thrown if failed
+     */
+    @Override
+    public void updateCredentials(Credentials credentials, Configuration config, CredentialsProperties props,
+                                  ActionExecutor.Context context) throws Exception {
+        final String[] fileSystemPaths = StringUtils.getStrings(props.getProperties().get(FILESYSTEM_PATH));
+        if (fileSystemPaths == null) {
+            throw new CredentialException(ErrorCode.E0510,
+                    FILESYSTEM_PATH + " property is required to get filesystem 
type credential");
+        }
+
+        final Path[] paths = new Path[fileSystemPaths.length];
+        for (int i = 0; i != fileSystemPaths.length; ++i) {
+            paths[i] = new Path(fileSystemPaths[i]);
+        }
+
+        final UserGroupInformation ugi = Services.get().get(UserGroupInformationService.class)
+                .getProxyUser(context.getWorkflow().getUser());
+        LOG.info("Obtaining delegation tokens");
+        obtainTokens(credentials, config, paths, ugi);
+    }
+
+    private void obtainTokens(Credentials credentials, Configuration config, Path[] paths, UserGroupInformation ugi)
+            throws java.io.IOException, InterruptedException {
+        ugi.doAs(
+                new PrivilegedExceptionAction<Void>() {
+                    @Override
+                    public Void run() throws Exception {
+                        TokenCache.obtainTokensForNamenodes(credentials, paths, config);
+                        Collection<Token<? extends TokenIdentifier>> creds = credentials.getAllTokens();
+                        for (Token tok : creds) {
+                            LOG.debug("Tokens in TokenCache: {0}", tok.getService());
+                        }
+                        return null;
+                    }
+                }
+        );
+    }
+}
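
With the provider above registered under the "filesystem" type, a workflow can also request delegation tokens for a cloud bucket explicitly. A minimal sketch assuming the standard Oozie workflow credentials syntax; the credential name and bucket URI are illustrative:

    <credentials>
        <credential name="fs-cred" type="filesystem">
            <property>
                <name>filesystem.path</name>
                <value>s3a://my-bucket</value>
            </property>
        </credential>
    </credentials>
    ...
    <action name="my-action" cred="fs-cred">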
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index dd13744..0940703 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -19,33 +19,12 @@
 package org.apache.oozie.action.hadoop;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.primitives.Ints;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.StringReader;
-import java.net.ConnectException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.security.PrivilegedExceptionAction;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -113,14 +92,37 @@ import org.jdom.Element;
 import org.jdom.JDOMException;
 import org.jdom.Namespace;
 
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.StringReader;
+import java.net.ConnectException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+import java.security.PrivilegedExceptionAction;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableMap;
-
+import static org.apache.oozie.action.hadoop.CredentialsProviderFactory.JHS;
+import static org.apache.oozie.action.hadoop.CredentialsProviderFactory.NAMENODE_FS;
+import static org.apache.oozie.action.hadoop.CredentialsProviderFactory.WORKFLOW_APP_FS;
+import static org.apache.oozie.action.hadoop.CredentialsProviderFactory.YARN;
+import static org.apache.oozie.action.hadoop.FileSystemCredentials.FILESYSTEM_PATH;
 
 public class JavaActionExecutor extends ActionExecutor {
     public static final String RUNNING = "RUNNING";
@@ -1071,15 +1073,17 @@ public class JavaActionExecutor extends ActionExecutor {
             Credentials credentials = new Credentials();
             Configuration launcherConf = createLauncherConf(actionFs, context, action, actionXml, actionConf);
             yarnClient = createYarnClient(context, launcherConf);
-            Map<String, CredentialsProperties> credentialsProperties = setCredentialPropertyToActionConf(context,
+            Map<String, CredentialsProperties> credPropertiesMap = setCredentialPropertyToActionConf(context,
                     action, actionConf);
             if (UserGroupInformation.isSecurityEnabled()) {
-                addHadoopCredentialPropertiesToActionConf(credentialsProperties);
+                addNameNodeCredentials(actionConf, credPropertiesMap);
+                addWorkflowAppFileSystemCredentials(context, credPropertiesMap);
+                addYarnCredentials(credPropertiesMap);
             }
             // Adding if action need to set more credential tokens
             Configuration credentialsConf = new Configuration(false);
             XConfiguration.copy(actionConf, credentialsConf);
-            setCredentialTokens(credentials, credentialsConf, context, action, credentialsProperties);
+            setCredentialTokens(credentials, credentialsConf, context, action, credPropertiesMap);
 
             // copy back new entries from credentialsConf
             for (Entry<String, String> entry : credentialsConf) {
@@ -1189,11 +1193,25 @@ public class JavaActionExecutor extends ActionExecutor {
         return context.getVar(OOZIE_ACTION_NAME);
     }
 
-    private void addHadoopCredentialPropertiesToActionConf(Map<String, CredentialsProperties> credentialsProperties) {
-        LOG.info("Adding default credentials for action: hdfs, yarn and jhs");
-        addHadoopCredentialProperties(credentialsProperties, CredentialsProviderFactory.HDFS);
-        addHadoopCredentialProperties(credentialsProperties, CredentialsProviderFactory.YARN);
-        addHadoopCredentialProperties(credentialsProperties, CredentialsProviderFactory.JHS);
+    void addNameNodeCredentials(Configuration actionConf, Map<String, CredentialsProperties> credPropertiesMap) {
+        LOG.info("Adding default credentials for action: namenode");
+        final String jobNameNodes = actionConf.get(MRJobConfig.JOB_NAMENODES);
+        CredentialsProperties hdfsCredProps = new CredentialsProperties(NAMENODE_FS, CredentialsProviderFactory.FS);
+        hdfsCredProps.getProperties().put(FILESYSTEM_PATH, jobNameNodes);
+        credPropertiesMap.put(NAMENODE_FS, hdfsCredProps);
+    }
+
+    void addWorkflowAppFileSystemCredentials(Context context, Map<String, CredentialsProperties> credPropertiesMap) {
+        LOG.info("Adding workflow application file system credentials for action");
+        CredentialsProperties fsCredProps = new CredentialsProperties(WORKFLOW_APP_FS, CredentialsProviderFactory.FS);
+        fsCredProps.getProperties().put(FILESYSTEM_PATH, context.getWorkflow().getAppPath());
+        credPropertiesMap.put(WORKFLOW_APP_FS, fsCredProps);
+    }
+
+    private void addYarnCredentials(Map<String, CredentialsProperties> credPropertiesMap) {
+        LOG.info("Adding default credentials for action: yarn and jhs");
+        addHadoopCredentialProperties(credPropertiesMap, YARN);
+        addHadoopCredentialProperties(credPropertiesMap, JHS);
     }
 
     private void addHadoopCredentialProperties(Map<String, CredentialsProperties> credentialsProperties, String type) {
@@ -1533,7 +1551,6 @@ public class JavaActionExecutor extends ActionExecutor {
             return;
         }
 
-        setActionTokenProperties(jobconf);
         // Make sure we're logged into Kerberos; if not, or near expiration, it will relogin
         CredentialsProviderFactory.ensureKerberosLogin();
         for (Entry<String, CredentialsProperties> entry : credPropertiesMap.entrySet()) {
@@ -1548,7 +1565,7 @@ public class JavaActionExecutor extends ActionExecutor {
                 } else {
                     LOG.debug("Credentials object is null for name= " + 
credName + ", type=" + credProps.getType());
                     throw new 
ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA020",
-                            "Could not load credentials of type [{0}] with 
name [{1}]]; perhaps it was not defined"
+                            "Could not load credentials of type [{0}] with 
name [{1}]; perhaps it was not defined"
                                     + " in oozie-site.xml?", 
credProps.getType(), credName);
                 }
             }
@@ -1561,15 +1578,6 @@ public class JavaActionExecutor extends ActionExecutor {
         return context != null && action != null && credPropertiesMap != null;
     }
 
-    /**
-     * Subclasses may override this method in order to take additional actions required for obtaining credential token(s).
-     *
-     * @param jobconf workflow action configuration
-     */
-    protected void setActionTokenProperties(final Configuration jobconf) {
-        // nop
-    }
-
     protected HashMap<String, CredentialsProperties> getActionCredentialsProperties(Context context,
             WorkflowAction action) throws Exception {
         HashMap<String, CredentialsProperties> props = new HashMap<String, CredentialsProperties>();
@@ -1624,7 +1632,7 @@ public class JavaActionExecutor extends ActionExecutor {
             }
             if (credProp == null && credName != null) {
                 throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA021",
-                        "Could not load credentials with name [{0}]].", credName);
+                        "Could not load credentials with name [{0}].", credName);
             }
         } else {
             LOG.debug("credentials is null for the action");
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestCredentials.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestCredentials.java
index 7601628..a411d27 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestCredentials.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestCredentials.java
@@ -18,17 +18,36 @@
 
 package org.apache.oozie.action.hadoop;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.oozie.action.hadoop.CredentialsProperties;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.oozie.WorkflowJobBean;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * Test Credentials
- *
  */
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
-public class TestCredentials {
+public class TestCredentials extends ActionExecutorTestCase {
+
+    private Map<String, CredentialsProperties> credPropertiesMap;
+
+    @Before
+    public void setUp() throws Exception {
+        super.setUp();
+        credPropertiesMap = new HashMap<>();
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+    }
 
     @Test
     public void testHbaseCredentials() {
@@ -36,8 +55,86 @@ public class TestCredentials {
         prop.getProperties().put("hbase.zookeeper.quorum", "dummyHost");
         HbaseCredentials hb = new HbaseCredentials();
         JobConf jc = new JobConf(false);
-        hb.copyHbaseConfToJobConf(jc,prop);
+        hb.copyHbaseConfToJobConf(jc, prop);
         assertEquals("dummyHost", jc.get("hbase.zookeeper.quorum"));
     }
 
+    @Test
+    public void testThrowErrorWhenMissingFileSystemPathProperty() throws Exception {
+        CredentialsProperties props = new CredentialsProperties("filesystem", "filesystem");
+        String exMsg = FileSystemCredentials.FILESYSTEM_PATH + " property is required to get filesystem type credential";
+        FileSystemCredentials fs = new FileSystemCredentials();
+        try {
+            fs.updateCredentials(null, null, props, null);
+            Assert.fail("No exception was thrown!");
+        } catch (CredentialException ex) {
+            Assert.assertTrue(ex.getMessage().contains(exMsg));
+        }
+    }
+
+    /**
+     * Test adding credential for default NameNode defined in mapreduce.job.hdfs-servers.
+     */
+    @Test
+    public void testAddNameNodeCredentials() {
+        Configuration actionConf = new Configuration();
+        String jobNameNodes = "hdfs://namenode1";
+        actionConf.set(MRJobConfig.JOB_NAMENODES, jobNameNodes);
+        new JavaActionExecutor().addNameNodeCredentials(actionConf, credPropertiesMap);
+        verifyCredentialsMapping(jobNameNodes);
+    }
+
+    /**
+     * Test adding credentials for multiple NameNodes/resources in case of cross cluster distcp action.
+     * Should override the value of mapreduce.job.hdfs-servers with oozie.launcher.mapreduce.job.hdfs-servers, in case it is defined.
+     */
+    @Test
+    public void testAddDistCpNameNodeCredentialsOverrideConf() {
+        Configuration actionConf = new Configuration();
+        String jobNameNodes = "hdfs://namenode1,abfs://resource2,s3a://resource3";
+        String defaultNameNode = "hdfs://namenode1";
+        actionConf.set(DistcpActionExecutor.OOZIE_LAUNCHER_MAPREDUCE_JOB_HDFS_SERVERS, jobNameNodes);
+        actionConf.set(MRJobConfig.JOB_NAMENODES, defaultNameNode);
+        new DistcpActionExecutor().addNameNodeCredentials(actionConf, credPropertiesMap);
+        verifyCredentialsMapping(jobNameNodes);
+        Assert.assertEquals(actionConf.get(MRJobConfig.JOB_NAMENODES), jobNameNodes);
+        Assert.assertNotEquals(actionConf.get(MRJobConfig.JOB_NAMENODES), defaultNameNode);
+    }
+
+    /**
+     * Test adding credentials for multiple NameNodes in case of cross cluster distcp action.
+     * Should NOT override the value of mapreduce.job.hdfs-servers, as oozie.launcher.mapreduce.job.hdfs-servers is not defined.
+     */
+    @Test
+    public void testAddDistCpNameNodeCredentialsDefaultConf() {
+        Configuration actionConf = new Configuration();
+        String jobNameNodes = "hdfs://namenode1,hdfs://namenode2,hdfs://namenode3";
+        actionConf.set(MRJobConfig.JOB_NAMENODES, jobNameNodes);
+        new DistcpActionExecutor().addNameNodeCredentials(actionConf, credPropertiesMap);
+        verifyCredentialsMapping(jobNameNodes);
+        Assert.assertEquals(actionConf.get(MRJobConfig.JOB_NAMENODES), jobNameNodes);
+    }
+
+    /**
+     * Test adding credentials for workflow application path.
+     */
+    @Test
+    public void testAddWorkflowAppFileSystemCredentials() {
+        String wfAppPath = "hdfs://namenode1/user/test_user/app/";
+        Context context = Mockito.mock(Context.class);
+        WorkflowJobBean wf = Mockito.mock(WorkflowJobBean.class);
+        Mockito.when(context.getWorkflow()).thenReturn(wf);
+        Mockito.when(wf.getAppPath()).thenReturn(wfAppPath);
+        new JavaActionExecutor().addWorkflowAppFileSystemCredentials(context, credPropertiesMap);
+        CredentialsProperties props = credPropertiesMap.get(CredentialsProviderFactory.WORKFLOW_APP_FS);
+        Assert.assertEquals(props.getProperties().get(FileSystemCredentials.FILESYSTEM_PATH), wfAppPath);
+    }
+
+    private void verifyCredentialsMapping(String jobNameNodes) {
+        CredentialsProperties props = credPropertiesMap.get(CredentialsProviderFactory.NAMENODE_FS);
+        Assert.assertNotNull(props);
+        Assert.assertEquals(CredentialsProviderFactory.FS, props.getType());
+        Assert.assertEquals(CredentialsProviderFactory.NAMENODE_FS, props.getName());
+        Assert.assertEquals(jobNameNodes, props.getProperties().get(FileSystemCredentials.FILESYSTEM_PATH));
+    }
 }
diff --git a/docs/src/site/markdown/DG_ActionAuthentication.md b/docs/src/site/markdown/DG_ActionAuthentication.md
index d280baf..e283b51 100644
--- a/docs/src/site/markdown/DG_ActionAuthentication.md
+++ b/docs/src/site/markdown/DG_ActionAuthentication.md
@@ -110,6 +110,8 @@ Oozie currently comes with the following Credentials implementations:
    1. HCatalog and Hive Metastore: `org.apache.oozie.action.hadoop.HCatCredentials`
    1. HBase: `org.apache.oozie.action.hadoop.HBaseCredentials`
    1. Hive Server 2: `org.apache.oozie.action.hadoop.Hive2Credentials`
+   1. File system (for workflows that require cross cluster or cloud storage access):
+   `org.apache.oozie.action.hadoop.FileSystemCredentials`
 
 HCatCredentials requires these two properties:
 
@@ -123,10 +125,13 @@ HBase does not require any additional properties since the hbase-site.xml on the
 to obtain a delegation token; though properties can be overwritten here if desired.
 
 Hive2Credentials requires these two properties:
-
    1. `hive2.server.principal`
    1. `hive2.jdbc.url`
 
+FileSystemCredentials requires the following property:
+
+   1. `filesystem.path` - Cloud storage bucket or NameNode path to which the action will need access rights at runtime.
+
 [::Go back to Oozie Documentation Index::](index.html)
 
 
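As the patched page also notes, credential classes are declared in oozie-site.xml via oozie.credentials.credentialclasses; the built-in filesystem type is registered automatically by CredentialsProviderFactory, so an entry is only needed for the optional providers. A sketch, following the example already in that documentation:

    <property>
        <name>oozie.credentials.credentialclasses</name>
        <value>
            hcat=org.apache.oozie.action.hadoop.HCatCredentials,
            hbase=org.apache.oozie.action.hadoop.HBaseCredentials,
            hive2=org.apache.oozie.action.hadoop.Hive2Credentials
        </value>
    </property>
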
diff --git a/release-log.txt b/release-log.txt
index 999371e..cf0f505 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 5.3.0 release (trunk - unreleased)
 
+OOZIE-3575 Add credential support for cloud file systems (matijhs via gezapeti)
 OOZIE-3579 [docs] Fix typos in coordinator documentation (qsbao via asalamon74)
 OOZIE-3066 Possibility to set retry-max globally (qsbao via asalamon74)
OOZIE-3578 MapReduce counters cannot be used over 120 (dionusos via pbacsko, gezapeti)
