Github user aljoscha commented on a diff in the pull request:

    https://github.com/apache/flink/pull/4636#discussion_r137213196
  
    --- Diff: flink-runtime/src/main/java/org/apache/flink/runtime/util/HadoopUtils.java ---
    @@ -0,0 +1,114 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.flink.runtime.util;
    +
    +import org.apache.flink.configuration.ConfigConstants;
    +
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.hadoop.io.Text;
    +import org.apache.hadoop.security.UserGroupInformation;
    +import org.apache.hadoop.security.token.Token;
    +import org.apache.hadoop.security.token.TokenIdentifier;
    +import org.slf4j.Logger;
    +import org.slf4j.LoggerFactory;
    +
    +import java.io.File;
    +import java.util.Collection;
    +
    +/**
    + * Utility class for working with Hadoop-related classes. This should only be used if Hadoop
    + * is on the classpath.
    + */
    +public class HadoopUtils {
    +
    +   private static final Logger LOG = LoggerFactory.getLogger(HadoopUtils.class);
    +
    +   private static final Text HDFS_DELEGATION_TOKEN_KIND = new Text("HDFS_DELEGATION_TOKEN");
    +
    +   public static Configuration getHadoopConfiguration(org.apache.flink.configuration.Configuration flinkConfiguration) {
    +
    +           Configuration retConf = new Configuration();
    +
    +           // We need to load both core-site.xml and hdfs-site.xml to determine the default fs path and
    +           // the hdfs configuration
    +           // Try to load HDFS configuration from Hadoop's own configuration files
    +           // 1. approach: Flink configuration
    +           final String hdfsDefaultPath = flinkConfiguration.getString(ConfigConstants
    +                   .HDFS_DEFAULT_CONFIG, null);
    --- End diff ---
    
    indeed, I'm fixing it
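    
    For reference, a rough, untested sketch (not part of this PR) of how the quoted getHadoopConfiguration(...) could be exercised; the hdfs-default.xml path and the fs.defaultFS lookup below are only placeholders:
    
        // Untested sketch: calls the getHadoopConfiguration(...) shown in the diff above.
        // Assumes flink-runtime and the Hadoop dependencies are on the classpath.
        import org.apache.flink.configuration.ConfigConstants;
        import org.apache.flink.runtime.util.HadoopUtils;
    
        public class HadoopUtilsSketch {
    
            public static void main(String[] args) {
                org.apache.flink.configuration.Configuration flinkConf =
                        new org.apache.flink.configuration.Configuration();
    
                // The key read in the diff above ("1. approach: Flink configuration");
                // the file path here is only an example.
                flinkConf.setString(ConfigConstants.HDFS_DEFAULT_CONFIG,
                        "/etc/hadoop/conf/hdfs-default.xml");
    
                org.apache.hadoop.conf.Configuration hadoopConf =
                        HadoopUtils.getHadoopConfiguration(flinkConf);
    
                // Prints whatever default filesystem the loaded XML files declared, if any.
                System.out.println(hadoopConf.get("fs.defaultFS"));
            }
        }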

