Author: omalley
Date: Tue Jun 24 08:42:17 2008
New Revision: 671219
URL: http://svn.apache.org/viewvc?rev=671219&view=rev
Log:
HADOOP-3595. Remove deprecated methods for mapred.combine.once
functionality, which was necessary to provide backwards-compatible
combiner semantics for 0.18. Contributed by Chris Douglas.
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/ReduceTask.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=671219&r1=671218&r2=671219&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Jun 24 08:42:17 2008
@@ -4,6 +4,10 @@
INCOMPATIBLE CHANGES
+ HADOOP-3595. Remove deprecated methods for mapred.combine.once
+ functionality, which was necessary to provide backwards-compatible
+ combiner semantics for 0.18. (cdouglas via omalley)
+
NEW FEATURES
IMPROVEMENTS
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java?rev=671219&r1=671218&r2=671219&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java Tue Jun 24 08:42:17 2008
@@ -799,20 +799,6 @@
}
/**
- * If true, ensures the combiner is run once and only once on output from
- * the map. Otherwise, combiner may be run zero or more times.
- */
- @Deprecated
- public void setCombineOnceOnly(boolean value) {
- setBoolean("mapred.combine.once", value);
- }
-
- @Deprecated
- public boolean getCombineOnceOnly() {
- return getBoolean("mapred.combine.once", false);
- }
-
- /**
* Should speculative execution be used for this job?
* Defaults to <code>true</code>.
*
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java?rev=671219&r1=671218&r2=671219&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java Tue Jun 24 08:42:17 2008
@@ -31,9 +31,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
-import java.util.Iterator;
import java.util.List;
-import java.util.NoSuchElementException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -834,30 +832,11 @@
writer = new IFile.Writer(job, out, keyClass, valClass, codec);
if (i == partition) {
- if (job.getCombineOnceOnly()) {
- Reducer combiner =
- (Reducer)ReflectionUtils.newInstance(combinerClass, job);
- combineCollector.setWriter(writer);
- combiner.reduce(key, new Iterator<V>() {
- private boolean done = false;
- public boolean hasNext() { return !done; }
- public V next() {
- if (done)
- throw new NoSuchElementException();
- done = true;
- return value;
- }
- public void remove() {
- throw new UnsupportedOperationException();
- }
- }, combineCollector, reporter);
- } else {
- final long recordStart = out.getPos();
- writer.append(key, value);
- // Note that our map byte count will not be accurate with
- // compression
- mapOutputByteCounter.increment(out.getPos() - recordStart);
- }
+ final long recordStart = out.getPos();
+ writer.append(key, value);
+ // Note that our map byte count will not be accurate with
+ // compression
+ mapOutputByteCounter.increment(out.getPos() - recordStart);
}
writer.close();
@@ -1057,8 +1036,7 @@
segmentStart = finalOut.getPos();
Writer<K, V> writer =
new Writer<K, V>(job, finalOut, keyClass, valClass, codec);
- if (null == combinerClass || job.getCombineOnceOnly() ||
- numSpills < minSpillsForCombine) {
+ if (null == combinerClass || numSpills < minSpillsForCombine) {
Merger.writeFile(kvIter, writer, reporter);
} else {
combineCollector.setWriter(writer);
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/ReduceTask.java?rev=671219&r1=671218&r2=671219&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/ReduceTask.java Tue Jun 24 08:42:17 2008
@@ -1286,9 +1286,7 @@
this.numCopiers = conf.getInt("mapred.reduce.parallel.copies", 5);
this.maxInFlight = 4 * numCopiers;
this.maxBackoff = conf.getInt("mapred.reduce.copy.backoff", 300);
- this.combinerClass = conf.getCombineOnceOnly()
- ? null
- : conf.getCombinerClass();
+ this.combinerClass = conf.getCombinerClass();
combineCollector = (null != combinerClass)
? new CombineOutputCollector(reduceCombineOutputCounter)
: null;