[ 
https://issues.apache.org/jira/browse/FLINK-7118?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

mingleizhang updated FLINK-7118:
--------------------------------
    Description: 
Since Flink no longer supports Hadoop 1.x versions, we should remove this code. The 
code below resides in {{org.apache.flink.api.java.hadoop.mapred.utils.HadoopUtils}}
        
{code:java}
public static JobContext instantiateJobContext(Configuration configuration, 
JobID jobId) throws Exception {
                try {
                        Class<?> clazz;
                        // for Hadoop 1.xx
                        if(JobContext.class.isInterface()) {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl", true, 
Thread.currentThread().getContextClassLoader());
                        }
                        // for Hadoop 2.xx
                        else {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.JobContext", true, 
Thread.currentThread().getContextClassLoader());
                        }
                        Constructor<?> constructor = 
clazz.getConstructor(Configuration.class, JobID.class);
                        JobContext context = (JobContext) 
constructor.newInstance(configuration, jobId);
                        
                        return context;
                } catch(Exception e) {
                        throw new Exception("Could not create instance of 
JobContext.");
                }
        }
{code}

And 


{code:java}
        public static TaskAttemptContext 
instantiateTaskAttemptContext(Configuration configuration,  TaskAttemptID 
taskAttemptID) throws Exception {
                try {
                        Class<?> clazz;
                        // for Hadoop 1.xx
                        if(JobContext.class.isInterface()) {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
                        }
                        // for Hadoop 2.xx
                        else {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
                        }
                        Constructor<?> constructor = 
clazz.getConstructor(Configuration.class, TaskAttemptID.class);
                        TaskAttemptContext context = (TaskAttemptContext) 
constructor.newInstance(configuration, taskAttemptID);
                        
                        return context;
                } catch(Exception e) {
                        throw new Exception("Could not create instance of 
TaskAttemptContext.");
                }
        }
{code}

  was:
Since Flink no longer supports Hadoop 1.x versions, we should remove this code. 

        
{code:java}
public static JobContext instantiateJobContext(Configuration configuration, 
JobID jobId) throws Exception {
                try {
                        Class<?> clazz;
                        // for Hadoop 1.xx
                        if(JobContext.class.isInterface()) {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl", true, 
Thread.currentThread().getContextClassLoader());
                        }
                        // for Hadoop 2.xx
                        else {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.JobContext", true, 
Thread.currentThread().getContextClassLoader());
                        }
                        Constructor<?> constructor = 
clazz.getConstructor(Configuration.class, JobID.class);
                        JobContext context = (JobContext) 
constructor.newInstance(configuration, jobId);
                        
                        return context;
                } catch(Exception e) {
                        throw new Exception("Could not create instance of 
JobContext.");
                }
        }
{code}

And 


{code:java}
        public static TaskAttemptContext 
instantiateTaskAttemptContext(Configuration configuration,  TaskAttemptID 
taskAttemptID) throws Exception {
                try {
                        Class<?> clazz;
                        // for Hadoop 1.xx
                        if(JobContext.class.isInterface()) {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
                        }
                        // for Hadoop 2.xx
                        else {
                                clazz = 
Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
                        }
                        Constructor<?> constructor = 
clazz.getConstructor(Configuration.class, TaskAttemptID.class);
                        TaskAttemptContext context = (TaskAttemptContext) 
constructor.newInstance(configuration, taskAttemptID);
                        
                        return context;
                } catch(Exception e) {
                        throw new Exception("Could not create instance of 
TaskAttemptContext.");
                }
        }
{code}



> Remove hadoop1.x code in HadoopUtils
> ------------------------------------
>
>                 Key: FLINK-7118
>                 URL: https://issues.apache.org/jira/browse/FLINK-7118
>             Project: Flink
>          Issue Type: Improvement
>          Components: Java API
>            Reporter: mingleizhang
>            Assignee: mingleizhang
>
> Since Flink no longer supports Hadoop 1.x versions, we should remove this code. The 
> code below resides in {{org.apache.flink.api.java.hadoop.mapred.utils.HadoopUtils}}
>       
> {code:java}
> public static JobContext instantiateJobContext(Configuration configuration, 
> JobID jobId) throws Exception {
>               try {
>                       Class<?> clazz;
>                       // for Hadoop 1.xx
>                       if(JobContext.class.isInterface()) {
>                               clazz = 
> Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl", true, 
> Thread.currentThread().getContextClassLoader());
>                       }
>                       // for Hadoop 2.xx
>                       else {
>                               clazz = 
> Class.forName("org.apache.hadoop.mapreduce.JobContext", true, 
> Thread.currentThread().getContextClassLoader());
>                       }
>                       Constructor<?> constructor = 
> clazz.getConstructor(Configuration.class, JobID.class);
>                       JobContext context = (JobContext) 
> constructor.newInstance(configuration, jobId);
>                       
>                       return context;
>               } catch(Exception e) {
>                       throw new Exception("Could not create instance of 
> JobContext.");
>               }
>       }
> {code}
> And 
> {code:java}
>       public static TaskAttemptContext 
> instantiateTaskAttemptContext(Configuration configuration,  TaskAttemptID 
> taskAttemptID) throws Exception {
>               try {
>                       Class<?> clazz;
>                       // for Hadoop 1.xx
>                       if(JobContext.class.isInterface()) {
>                               clazz = 
> Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
>                       }
>                       // for Hadoop 2.xx
>                       else {
>                               clazz = 
> Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
>                       }
>                       Constructor<?> constructor = 
> clazz.getConstructor(Configuration.class, TaskAttemptID.class);
>                       TaskAttemptContext context = (TaskAttemptContext) 
> constructor.newInstance(configuration, taskAttemptID);
>                       
>                       return context;
>               } catch(Exception e) {
>                       throw new Exception("Could not create instance of 
> TaskAttemptContext.");
>               }
>       }
> {code}



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)

Reply via email to