Github user Parth-Brahmbhatt commented on a diff in the pull request:

    https://github.com/apache/incubator-storm/pull/190#discussion_r15125867
  
    --- Diff: 
storm-core/src/jvm/backtype/storm/security/auth/kerberos/AutoHDFS.java ---
    @@ -0,0 +1,298 @@
    +/**
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package backtype.storm.security.auth.kerberos;
    +
    +import backtype.storm.Config;
    +import backtype.storm.security.auth.IAutoCredentials;
    +import backtype.storm.security.auth.ICredentialsRenewer;
    +import org.slf4j.Logger;
    +import org.slf4j.LoggerFactory;
    +
    +import javax.security.auth.Subject;
    +import javax.xml.bind.DatatypeConverter;
    +import java.io.*;
    +import java.lang.reflect.Method;
    +import java.net.URI;
    +import java.util.Collection;
    +import java.util.Map;
    +
    +/**
    + * Automatically get HDFS delegation tokens and push it to user's 
topology. The class
    + * assumes that HDFS configuration files are in your class path.
    + */
    +public class AutoHDFS implements IAutoCredentials, ICredentialsRenewer {
    +    private static final Logger LOG = 
LoggerFactory.getLogger(AutoHDFS.class);
    +    public static final String HDFS_CREDENTIALS = "HDFS_CREDENTIALS";
    +    private static final String CONF_KEYTAB_KEY = "keytab";
    +    private static final String CONF_USER_KEY = "user";
    +
    +    private Map conf;
    +
    +    public void prepare(Map conf) {
    +        this.conf = conf;
    +    }
    +
    +    @SuppressWarnings("unchecked")
    +    private Object getConfiguration() {
    +        try {
    +            final String hdfsUser = (String) conf.get(Config.HDFS_USER);
    +            final String hdfsUserKeyTab = (String) 
conf.get(Config.HDFS_USER_KEYTAB);
    +
    +            /**
    +             *  Configuration configuration = new Configuration();
    +             *  configuration.set(CONF_KEYTAB_KEY, hdfsUserKeyTab);
    +             *  configuration.set(CONF_USER_KEY, hdfsUser);
    +             */
    +            Class configurationClass = 
Class.forName("org.apache.hadoop.conf.Configuration");
    +            Object configuration = configurationClass.newInstance();
    +
    +            Method setMethod = configurationClass.getMethod("set", 
String.class, String.class);
    +            setMethod.invoke(configuration, CONF_KEYTAB_KEY, 
hdfsUserKeyTab);
    +            setMethod.invoke(configuration, CONF_USER_KEY, hdfsUser);
    +            /**
    +             * Following are the minimum set of configuration that needs 
to be set,  users should have hdfs-site.xml
    +             * and core-site.xml in the class path which should set these 
configuration.
    +             * setMethod.invoke(configuration, 
"hadoop.security.authentication", "KERBEROS");
    +             * 
setMethod.invoke(configuration,"dfs.namenode.kerberos.principal",
    +             *                                
"hdfs/[email protected]");
    +             * setMethod.invoke(configuration, 
"hadoop.security.kerberos.ticket.cache.path", "/tmp/krb5cc_1002");
    +             */
    +
    +            setMethod.invoke(configuration, 
"hadoop.security.authentication", "KERBEROS");
    +            setMethod.invoke(configuration, 
"dfs.namenode.kerberos.principal","hdfs/[email protected]");
    +            setMethod.invoke(configuration, 
"hadoop.security.kerberos.ticket.cache.path", "/tmp/krb5cc_1002");
    +
    +            //UserGroupInformation.setConfiguration(configuration);
    +            final Class ugiClass = 
Class.forName("org.apache.hadoop.security.UserGroupInformation");
    +            Method setConfigurationMethod = 
ugiClass.getMethod("setConfiguration", configurationClass);
    +            setConfigurationMethod.invoke(null, configuration);
    +            return configuration;
    +        }  catch (Exception e) {
    +            throw new RuntimeException(e);
    +        }
    +    }
    +
    +    @SuppressWarnings("unchecked")
    +    private void login(Object configuration) {
    +        try {
    +            Class configurationClass = 
Class.forName("org.apache.hadoop.conf.Configuration");
    +            final Class securityUtilClass = 
Class.forName("org.apache.hadoop.security.SecurityUtil");
    +            Method loginMethod = securityUtilClass.getMethod("login", 
configurationClass, String.class, String.class);
    +            loginMethod.invoke(null, configuration, CONF_KEYTAB_KEY, 
CONF_USER_KEY);
    +        } catch (Exception e) {
    +           throw new RuntimeException("Failed to login to hdfs .", e);
    +        }
    +    }
    +
    +    @SuppressWarnings("unchecked")
    +    private byte[] getHDFSCredsWithDelegationToken() throws Exception {
    +
    +        try {
    +            /**
    +             * What we want to do is following:
    +             *  Configuration configuration = new Configuration();
    +             *  configuration.set(CONF_KEYTAB_KEY, hdfsUserKeyTab);
    +             *  configuration.set(CONF_USER_KEY, hdfsUser);
    +             *  UserGroupInformation.setConfiguration(configuration);
    +             *  if(UserGroupInformation.isSecurityEnabled) {
    +             *      SecurityUtil.login(configuration, CONF_KEYTAB_KEY, 
CONF_USER_KEY);
    +             *      FileSystem fs = FileSystem.get(nameNodeURI, 
configuration, topologySubmitterUser);
    +             *      UserGroupInformation ugi = 
UserGroupInformation.getCurrentUser();
    +             *      UserGroupInformation proxyUser = 
UserGroupInformation.createProxyUser(topologySubmitterUser, ugi);
    +             *      Credentials credential= proxyUser.getCredentials();
    +             *      fs.addDelegationToken(hdfsUser, credential);
    +             * }
    +             * and then return the credential object as a bytearray.
    +             */
    +            Object configuration = getConfiguration();
    +            final Class ugiClass = 
Class.forName("org.apache.hadoop.security.UserGroupInformation");
    +            final Method isSecurityEnabledMethod = 
ugiClass.getDeclaredMethod("isSecurityEnabled");
    +            boolean isSecurityEnabled = 
(Boolean)isSecurityEnabledMethod.invoke(null);
    +            if(isSecurityEnabled) {
    +                login(configuration);
    +
    +                final URI nameNodeURI = URI.create((String) 
conf.get(Config.HDFS_NAMENODE_URL));
    +                final String topologySubmitterUser = (String) 
conf.get(Config.TOPOLOGY_SUBMITTER_USER);
    +                final String hdfsUser = (String) 
conf.get(Config.HDFS_USER);
    +
    +                Class configurationClass = 
Class.forName("org.apache.hadoop.conf.Configuration");
    +
    +                //FileSystem fs = FileSystem.get(nameNodeURI, 
configuration, topologySubmitterUser);
    +                Class fileSystemClass = 
Class.forName("org.apache.hadoop.fs.FileSystem");
    +                Method getMethod = fileSystemClass.getMethod("get", 
URI.class, configurationClass, String.class);
    +                Object fileSystem = getMethod.invoke(null, nameNodeURI, 
configuration, topologySubmitterUser);
    +
    +                //UserGroupInformation ugi = 
UserGroupInformation.getCurrentUser();
    +                Method getCurrentUserMethod = 
ugiClass.getMethod("getCurrentUser");
    +                final Object ugi = getCurrentUserMethod.invoke(null);
    +
    +                //UserGroupInformation proxyUser = 
UserGroupInformation.createProxyUser(topologySubmitterUser, ugi);
    +                Method createProxyUserMethod = 
ugiClass.getMethod("createProxyUser", String.class, ugiClass);
    +                Object proxyUGI = createProxyUserMethod.invoke(null, 
topologySubmitterUser, ugi);
    +
    +                //Credentials credential= proxyUser.getCredentials();
    +                Method getCredentialsMethod = 
ugiClass.getMethod("getCredentials");
    +                Object credentials = getCredentialsMethod.invoke(proxyUGI);
    +
    +                //fs.addDelegationToken(hdfsUser, credential);
    +                Class credentialClass = 
Class.forName("org.apache.hadoop.security.Credentials");
    +                Method addDelegationTokensMethod = 
fileSystemClass.getMethod("addDelegationTokens", String.class,
    +                        credentialClass);
    +                addDelegationTokensMethod.invoke(fileSystem, hdfsUser, 
credentials);
    +
    +
    +                ByteArrayOutputStream bao = new ByteArrayOutputStream();
    +                ObjectOutputStream out = new ObjectOutputStream(bao);
    +                Method writeMethod = credentialClass.getMethod("write", 
DataOutput.class);
    +                writeMethod.invoke(credentials, out);
    +                out.flush();
    +                out.close();
    +
    +                LOG.info(bao.toString());
    +                return bao.toByteArray();
    +            } else {
    +                throw new RuntimeException("Security is not enabled for 
HDFS");
    +            }
    +        } catch (Exception ex) {
    +            throw new RuntimeException("Failed to get delegation tokens." 
, ex);
    +        }
    +    }
    +
    +    @Override
    +    public void populateCredentials(Map<String, String> credentials) {
    --- End diff --
    
Completely my bad — I did not intend to keep these config lines uncommented. 
I had kept them uncommented for testing because my hdfs-site.xml settings were 
not being picked up by the code due to a classpath issue. Removing them.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

Reply via email to