Author: thejas
Date: Fri Jan 17 03:19:30 2014
New Revision: 1559002

URL: http://svn.apache.org/r1559002
Log:
HIVE-6159 : Hive uses deprecated hadoop configuration in Hadoop 2.0 (shanyu zhao via Thejas Nair)
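The patch replaces Hadoop 1.x property names that were hard-coded in Hive with a lookup through the shim layer, so each supported Hadoop version supplies its own spelling of every property. A minimal sketch of the lookup pattern, using a hypothetical demo class (only ShimLoader.getHadoopShims().getHadoopConfNames() and its string keys come from this patch):

    import java.util.Map;

    import org.apache.hadoop.hive.shims.ShimLoader;

    // Hypothetical demo class: resolve a property name through the shim
    // layer instead of hard-coding the deprecated Hadoop 1.x spelling.
    public class ConfNameDemo {
      public static void main(String[] args) {
        Map<String, String> names = ShimLoader.getHadoopShims().getHadoopConfNames();
        // Prints "fs.default.name" under the 0.20/0.20S shims and
        // "fs.defaultFS" under the 0.23 (Hadoop 2.x) shim.
        System.out.println(names.get("HADOOPFS"));
      }
    }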
Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/ql/src/test/results/clientpositive/overridden_confs.q.out
    hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1559002&r1=1559001&r2=1559002&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Jan 17 03:19:30 2014
@@ -146,7 +146,6 @@ public class HiveConf extends Configurat
    */
   public static final HiveConf.ConfVars[] dbVars = {
     HiveConf.ConfVars.HADOOPBIN,
-    HiveConf.ConfVars.HADOOPJT,
     HiveConf.ConfVars.METASTOREWAREHOUSE,
     HiveConf.ConfVars.SCRATCHDIR
   };
@@ -231,22 +230,23 @@ public class HiveConf extends Configurat
     // a symbolic name to reference in the Hive source code. Properties with non-null
     // values will override any values set in the underlying Hadoop configuration.
     HADOOPBIN("hadoop.bin.path", findHadoopBinary()),
-    HADOOPFS("fs.default.name", null),
     HIVE_FS_HAR_IMPL("fs.har.impl", "org.apache.hadoop.hive.shims.HiveHarFileSystem"),
-    HADOOPMAPFILENAME("map.input.file", null),
-    HADOOPMAPREDINPUTDIR("mapred.input.dir", null),
-    HADOOPMAPREDINPUTDIRRECURSIVE("mapred.input.dir.recursive", false),
-    HADOOPJT("mapred.job.tracker", null),
-    MAPREDMAXSPLITSIZE("mapred.max.split.size", 256000000L),
-    MAPREDMINSPLITSIZE("mapred.min.split.size", 1L),
-    MAPREDMINSPLITSIZEPERNODE("mapred.min.split.size.per.rack", 1L),
-    MAPREDMINSPLITSIZEPERRACK("mapred.min.split.size.per.node", 1L),
+    HADOOPFS(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPFS"), null),
+    HADOOPMAPFILENAME(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPMAPFILENAME"), null),
+    HADOOPMAPREDINPUTDIR(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPMAPREDINPUTDIR"), null),
+    HADOOPMAPREDINPUTDIRRECURSIVE(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPMAPREDINPUTDIRRECURSIVE"), false),
+    MAPREDMAXSPLITSIZE(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMAXSPLITSIZE"), 256000000L),
+    MAPREDMINSPLITSIZE(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZE"), 1L),
+    MAPREDMINSPLITSIZEPERNODE(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZEPERNODE"), 1L),
+    MAPREDMINSPLITSIZEPERRACK(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZEPERRACK"), 1L),

     // The number of reduce tasks per job. Hadoop sets this value to 1 by default
     // By setting this property to -1, Hive will automatically determine the correct
     // number of reducers.
-    HADOOPNUMREDUCERS("mapred.reduce.tasks", -1),
-    HADOOPJOBNAME("mapred.job.name", null),
-    HADOOPSPECULATIVEEXECREDUCERS("mapred.reduce.tasks.speculative.execution", true),
+    HADOOPNUMREDUCERS(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPNUMREDUCERS"), -1),
+    HADOOPJOBNAME(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPJOBNAME"), null),
+    HADOOPSPECULATIVEEXECREDUCERS(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPSPECULATIVEEXECREDUCERS"), true),
+    MAPREDSETUPCLEANUPNEEDED(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDSETUPCLEANUPNEEDED"), false),
+    MAPREDTASKCLEANUPNEEDED(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDTASKCLEANUPNEEDED"), false),

     // Metastore stuff. Be sure to update HiveConf.metaVars when you add
     // something here!
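Because the ConfVars constants above are initialized when the HiveConf class loads, each constant's property name is now bound once per JVM to whichever shim ShimLoader selects. A hedged usage sketch, assuming only the standard public varname field on HiveConf.ConfVars:

    import org.apache.hadoop.hive.conf.HiveConf;

    // Sketch: the property name behind a ConfVars constant now depends on
    // the Hadoop version found on the classpath at class-load time.
    public class VarnameDemo {
      public static void main(String[] args) {
        // Prints "mapred.reduce.tasks" under the Hadoop 1.x shims and
        // "mapreduce.job.reduces" under the Hadoop 2.x shim.
        System.out.println(HiveConf.ConfVars.HADOOPNUMREDUCERS.varname);
      }
    }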
- HADOOPNUMREDUCERS("mapred.reduce.tasks", -1), - HADOOPJOBNAME("mapred.job.name", null), - HADOOPSPECULATIVEEXECREDUCERS("mapred.reduce.tasks.speculative.execution", true), + HADOOPNUMREDUCERS(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPNUMREDUCERS"), -1), + HADOOPJOBNAME(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPJOBNAME"), null), + HADOOPSPECULATIVEEXECREDUCERS(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPSPECULATIVEEXECREDUCERS"), true), + MAPREDSETUPCLEANUPNEEDED(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDSETUPCLEANUPNEEDED"), false), + MAPREDTASKCLEANUPNEEDED(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDTASKCLEANUPNEEDED"), false), // Metastore stuff. Be sure to update HiveConf.metaVars when you add // something here! Modified: hive/trunk/ql/src/test/results/clientpositive/overridden_confs.q.out URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/overridden_confs.q.out?rev=1559002&r1=1559001&r2=1559002&view=diff ============================================================================== --- hive/trunk/ql/src/test/results/clientpositive/overridden_confs.q.out (original) +++ hive/trunk/ql/src/test/results/clientpositive/overridden_confs.q.out Fri Jan 17 03:19:30 2014 @@ -2,7 +2,6 @@ PREHOOK: query: select count(*) from src PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -Key: mapred.job.tracker, Value: local Key: hive.exec.post.hooks, Value: org.apache.hadoop.hive.ql.hooks.VerifyOverriddenConfigsHook Key: hive.config.doesnt.exit, Value: abc 500 Modified: hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1559002&r1=1559001&r2=1559002&view=diff ============================================================================== --- hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original) +++ hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Fri Jan 17 03:19:30 2014 @@ -34,6 +34,8 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; +import java.util.HashMap; +import java.util.Map; import javax.security.auth.Subject; import javax.security.auth.login.LoginException; @@ -753,4 +755,22 @@ public class Hadoop20Shims implements Ha public FileSystem createProxyFileSystem(FileSystem fs, URI uri) { return new ProxyFileSystem(fs, uri); } + @Override + public Map<String, String> getHadoopConfNames() { + Map<String, String> ret = new HashMap<String, String>(); + ret.put("HADOOPFS", "fs.default.name"); + ret.put("HADOOPMAPFILENAME", "map.input.file"); + ret.put("HADOOPMAPREDINPUTDIR", "mapred.input.dir"); + ret.put("HADOOPMAPREDINPUTDIRRECURSIVE", "mapred.input.dir.recursive"); + ret.put("MAPREDMAXSPLITSIZE", "mapred.max.split.size"); + ret.put("MAPREDMINSPLITSIZE", "mapred.min.split.size"); + ret.put("MAPREDMINSPLITSIZEPERNODE", "mapred.min.split.size.per.rack"); + ret.put("MAPREDMINSPLITSIZEPERRACK", "mapred.min.split.size.per.node"); + ret.put("HADOOPNUMREDUCERS", "mapred.reduce.tasks"); + ret.put("HADOOPJOBNAME", "mapred.job.name"); + ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapred.reduce.tasks.speculative.execution"); + ret.put("MAPREDSETUPCLEANUPNEEDED", "mapred.committer.job.setup.cleanup.needed"); + ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed"); 
Modified: hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1559002&r1=1559001&r2=1559002&view=diff
==============================================================================
--- hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Fri Jan 17 03:19:30 2014
@@ -24,6 +24,8 @@ import java.net.URL;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
@@ -390,4 +392,22 @@ public class Hadoop20SShims extends Hado
   public FileSystem createProxyFileSystem(FileSystem fs, URI uri) {
     return new ProxyFileSystem(fs, uri);
   }
+  @Override
+  public Map<String, String> getHadoopConfNames() {
+    Map<String, String> ret = new HashMap<String, String>();
+    ret.put("HADOOPFS", "fs.default.name");
+    ret.put("HADOOPMAPFILENAME", "map.input.file");
+    ret.put("HADOOPMAPREDINPUTDIR", "mapred.input.dir");
+    ret.put("HADOOPMAPREDINPUTDIRRECURSIVE", "mapred.input.dir.recursive");
+    ret.put("MAPREDMAXSPLITSIZE", "mapred.max.split.size");
+    ret.put("MAPREDMINSPLITSIZE", "mapred.min.split.size");
+    ret.put("MAPREDMINSPLITSIZEPERNODE", "mapred.min.split.size.per.rack");
+    ret.put("MAPREDMINSPLITSIZEPERRACK", "mapred.min.split.size.per.node");
+    ret.put("HADOOPNUMREDUCERS", "mapred.reduce.tasks");
+    ret.put("HADOOPJOBNAME", "mapred.job.name");
+    ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapred.reduce.tasks.speculative.execution");
+    ret.put("MAPREDSETUPCLEANUPNEEDED", "mapred.committer.job.setup.cleanup.needed");
+    ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed");
+    return ret;
+  }
 }
"mapreduce.input.fileinputformat.split.minsize.per.rack"); + ret.put("MAPREDMINSPLITSIZEPERRACK", "mapreduce.input.fileinputformat.split.minsize.per.node"); + ret.put("HADOOPNUMREDUCERS", "mapreduce.job.reduces"); + ret.put("HADOOPJOBNAME", "mapreduce.job.name"); + ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapreduce.reduce.speculative"); + ret.put("MAPREDSETUPCLEANUPNEEDED", "mapreduce.job.committer.setup.cleanup.needed"); + ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed"); + return ret; + } } Modified: hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java URL: http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1559002&r1=1559001&r2=1559002&view=diff ============================================================================== --- hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original) +++ hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Fri Jan 17 03:19:30 2014 @@ -39,6 +39,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil; +import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier; import org.apache.hadoop.hive.thrift.DelegationTokenSelector; import org.apache.hadoop.http.HtmlQuoting; @@ -324,18 +325,18 @@ public abstract class HadoopShimsSecure @Override public InputSplitShim[] getSplits(JobConf job, int numSplits) throws IOException { - long minSize = job.getLong("mapred.min.split.size", 0); + long minSize = job.getLong(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZE"), 0); // For backward compatibility, let the above parameter be used - if (job.getLong("mapred.min.split.size.per.node", 0) == 0) { + if (job.getLong(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZEPERNODE"), 0) == 0) { super.setMinSplitSizeNode(minSize); } - if (job.getLong("mapred.min.split.size.per.rack", 0) == 0) { + if (job.getLong(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZEPERRACK"), 0) == 0) { super.setMinSplitSizeRack(minSize); } - if (job.getLong("mapred.max.split.size", 0) == 0) { + if (job.getLong(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMAXSPLITSIZE"), 0) == 0) { super.setMaxSplitSize(minSize); } @@ -426,11 +427,11 @@ public abstract class HadoopShimsSecure // option to bypass job setup and cleanup was introduced in hadoop-21 (MAPREDUCE-463) // but can be backported. So we disable setup/cleanup in all versions >= 0.19 - conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false); + conf.setBoolean(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDSETUPCLEANUPNEEDED"), false); // option to bypass task cleanup task was introduced in hadoop-23 (MAPREDUCE-2206) // but can be backported. 
Modified: hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1559002&r1=1559001&r2=1559002&view=diff
==============================================================================
--- hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Fri Jan 17 03:19:30 2014
@@ -28,6 +28,7 @@ import java.security.PrivilegedException
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;

 import javax.security.auth.login.LoginException;
@@ -517,4 +518,6 @@ public interface HadoopShims {
    * other file system.
    */
   public FileSystem createProxyFileSystem(FileSystem fs, URI uri);
+
+  public Map<String, String> getHadoopConfNames();
 }