Github user zentol commented on a diff in the pull request:

    https://github.com/apache/flink/pull/4636#discussion_r139481104
  
    --- Diff: flink-runtime/src/main/java/org/apache/flink/runtime/util/HadoopUtils.java ---
    @@ -0,0 +1,119 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.flink.runtime.util;
    +
    +import org.apache.flink.configuration.ConfigConstants;
    +
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.hadoop.io.Text;
    +import org.apache.hadoop.security.UserGroupInformation;
    +import org.apache.hadoop.security.token.Token;
    +import org.apache.hadoop.security.token.TokenIdentifier;
    +import org.slf4j.Logger;
    +import org.slf4j.LoggerFactory;
    +
    +import java.io.File;
    +import java.util.Collection;
    +
    +/**
    + * Utility class for working with Hadoop-related classes. This should only be used if Hadoop
    + * is on the classpath.
    + */
    +public class HadoopUtils {
    +
    +   private static final Logger LOG = LoggerFactory.getLogger(HadoopUtils.class);
    +
    +   private static final Text HDFS_DELEGATION_TOKEN_KIND = new Text("HDFS_DELEGATION_TOKEN");
    +
    +   public static Configuration getHadoopConfiguration(org.apache.flink.configuration.Configuration flinkConfiguration) {
    +
    +           Configuration result = new Configuration();
    +           boolean foundHadoopConfiguration = false;
    +
    +           // We need to load both core-site.xml and hdfs-site.xml to determine the default fs path and
    +           // the hdfs configuration
    +           // Try to load HDFS configuration from Hadoop's own configuration files
    +           // 1. approach: Flink configuration
    +           final String hdfsDefaultPath =
    +                   flinkConfiguration.getString(ConfigConstants.HDFS_DEFAULT_CONFIG, null);
    +
    +           if (hdfsDefaultPath != null) {
    +                   result.addResource(new org.apache.hadoop.fs.Path(hdfsDefaultPath));
    +                   LOG.debug("Using hdfs-default configuration-file path from Flink config: {}", hdfsDefaultPath);
    +                   foundHadoopConfiguration = true;
    +           } else {
    +                   LOG.debug("Cannot find hdfs-default configuration-file path in Flink config.");
    +           }
    +
    +           final String hdfsSitePath = flinkConfiguration.getString(ConfigConstants.HDFS_SITE_CONFIG, null);
    +           if (hdfsSitePath != null) {
    +                   result.addResource(new org.apache.hadoop.fs.Path(hdfsSitePath));
    +                   LOG.debug("Using hdfs-site configuration-file path from Flink config: {}", hdfsSitePath);
    +                   foundHadoopConfiguration = true;
    +           } else {
    +                   LOG.debug("Cannot find hdfs-site configuration-file path in Flink config.");
    +           }
    +
    +           // 2. approach: environment variables
    +           String[] possibleHadoopConfPaths = new String[4];
    +           possibleHadoopConfPaths[0] = flinkConfiguration.getString(ConfigConstants.PATH_HADOOP_CONFIG, null);
    +           possibleHadoopConfPaths[1] = System.getenv("HADOOP_CONF_DIR");
    +
    +           if (System.getenv("HADOOP_HOME") != null) {
    +                   possibleHadoopConfPaths[2] = System.getenv("HADOOP_HOME") + "/conf";
    +                   possibleHadoopConfPaths[3] = System.getenv("HADOOP_HOME") + "/etc/hadoop"; // hadoop 2.2
    +           }
    +
    +           for (String possibleHadoopConfPath : possibleHadoopConfPaths) {
    +                   if (possibleHadoopConfPath != null) {
    +                           if (new File(possibleHadoopConfPath).exists()) {
    +                                   if (new File(possibleHadoopConfPath + "/core-site.xml").exists()) {
    +                                           result.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/core-site.xml"));
    +                                           LOG.debug("Adding " + possibleHadoopConfPath + "/core-site.xml to hadoop configuration");
    +                                           foundHadoopConfiguration = true;
    +                                   }
    +                                   if (new File(possibleHadoopConfPath + "/hdfs-site.xml").exists()) {
    +                                           result.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/hdfs-site.xml"));
    +                                           LOG.debug("Adding " + possibleHadoopConfPath + "/hdfs-site.xml to hadoop configuration");
    +                                           foundHadoopConfiguration = true;
    +                                   }
    +                           }
    +                   }
    +           }
    +
    +           LOG.debug("Could not find Hadoop configuration via any of the supported methods " +
    --- End diff --
    
    Missing a check of whether we actually failed to find anything: as written, the "Could not find Hadoop configuration" message is logged unconditionally, even when foundHadoopConfiguration is true. See the sketch below.
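    
    A minimal sketch of the guard this comment asks for, assuming the message should only be emitted when none of the lookup approaches found a configuration file; the parenthesized message tail and the trailing return are illustrative, since the diff is truncated at this point:
    
        // Hypothetical completion of the method (the diff above is cut off here):
        // only report failure if neither the Flink configuration keys nor the
        // environment variables yielded a core-site.xml or hdfs-site.xml.
        if (!foundHadoopConfiguration) {
                LOG.debug("Could not find Hadoop configuration via any of the supported methods " +
                        "(Flink configuration, environment variables).");
        }
    
        return result; // assuming the method ends by returning the assembled Configuration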

