Author: omalley
Date: Tue Aug 12 15:25:22 2008
New Revision: 685348
URL: http://svn.apache.org/viewvc?rev=685348&view=rev
Log:
HADOOP-3664. Remove the deprecated method InputFormat.validateInput,
which is no longer needed. (tomwhite via omalley)
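
Note for downstream InputFormat implementations: any checking that used to live
in validateInput now belongs in getSplits, which the JobClient invokes at
submit time (see the JobClient.java hunk below). A minimal sketch of a migrated
implementation; the class name and the emptiness check are illustrative only,
not part of this commit:

import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;

// Illustrative subclass: with validateInput gone, fail early from
// getSplits instead, since it is the first input-format hook the
// client calls during job submission.
public class ExampleInputFormat extends FileInputFormat<LongWritable, Text> {

  @Override
  public InputSplit[] getSplits(JobConf job, int numSplits)
      throws IOException {
    FileStatus[] files = listStatus(job);   // replaces validateInput logic
    if (files.length == 0) {
      throw new IOException("No input files found");
    }
    return super.getSplits(job, numSplits);
  }

  public RecordReader<LongWritable, Text> getRecordReader(
      InputSplit split, JobConf job, Reporter reporter) throws IOException {
    throw new UnsupportedOperationException("omitted from this sketch");
  }
}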
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/SleepJob.java
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InputFormat.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobClient.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MultiFileInputFormat.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFormat.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/CompositeInputFormat.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/Parser.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/DelegatingInputFormat.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/NLineInputFormat.java
hadoop/core/trunk/src/test/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/DFSTestUtil.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDatanodeDeath.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDecommission.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileAppend.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileCreation.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileStatus.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSetrepIncreasing.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSmallBlock.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/EmptyInputFormat.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormat.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMapCollection.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/ConfigurableInputFormat.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/FakeIF.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/TestDatamerge.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Aug 12 15:25:22 2008
@@ -37,6 +37,9 @@
HADOOP-3792. Make FsShell -test consistent with unix semantics, returning
zero for true and non-zero for false. (Ben Slusky via cdouglas)
+ HADOOP-3664. Remove the deprecated method InputFormat.validateInput,
+ which is no longer needed. (tomwhite via omalley)
+
NEW FEATURES
HADOOP-3341. Allow streaming jobs to specify the field separator for map
Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java Tue Aug 12 15:25:22 2008
@@ -66,10 +66,6 @@
*/
public abstract boolean next(Text key, Text value) throws IOException;
- /** This implementation always returns true. */
- public void validateInput(JobConf job) throws IOException {
- }
-
/** Returns the current position in the input. */
public synchronized long getPos() throws IOException {
return in_.getPos();
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java Tue Aug 12 15:25:22 2008
@@ -330,24 +330,6 @@
* hostnames of machines that contain the given file.
*
* The FileSystem will simply return an elt containing 'localhost'.
- * @deprecated use {@link #getFileBlockLocations(FileStatus, long, long)}
- */
- @Deprecated
- public BlockLocation[] getFileBlockLocations(Path f,
- long start, long len) throws IOException {
-
- return getFileBlockLocations(getFileStatus(f), start, len);
- }
-
- /**
- * Return an array containing hostnames, offset and size of
- * portions of the given file. For a nonexistent
- * file or regions, null will be returned.
- *
- * This call is most helpful with DFS, where it returns
- * hostnames of machines that contain the given file.
- *
- * The FileSystem will simply return an elt containing 'localhost'.
*/
public BlockLocation[] getFileBlockLocations(FileStatus file,
long start, long len) throws IOException {
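
With the Path-based overload gone, callers resolve the FileStatus once and
pass it in; since the removed overload just called getFileStatus itself,
callers that already hold a FileStatus now save that extra lookup. A hedged
sketch of the new calling convention (the path is hypothetical):

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class BlockLocationExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path f = new Path("/user/example/data.txt");  // hypothetical path

    // Before (removed by this commit): fs.getFileBlockLocations(f, 0, len)
    // After: look the file up once, then reuse the FileStatus.
    FileStatus status = fs.getFileStatus(f);
    BlockLocation[] locations =
        fs.getFileBlockLocations(status, 0, status.getLen());
    for (BlockLocation loc : locations) {
      System.out.println(Arrays.toString(loc.getHosts()));
    }
  }
}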
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java Tue Aug 12 15:25:22 2008
@@ -87,21 +87,6 @@
fs.checkPath(path);
}
- /**
- * Return an array containing hostnames, offset and size of
- * portions of the given file. For a nonexistent
- * file or regions, null will be returned.
- *
- * This call is most helpful with DFS, where it returns
- * hostnames of machines that contain the given file.
- *
- * The FileSystem will simply return an elt containing 'localhost'.
- */
- public BlockLocation[] getFileBlockLocations(Path f, long start,
- long len) throws IOException {
- return fs.getFileBlockLocations(f, start, len);
- }
-
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
long len) throws IOException {
return fs.getFileBlockLocations(file, start, len);
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java Tue Aug 12 15:25:22 2008
@@ -470,10 +470,11 @@
System.out.flush();
boolean printWarning = false;
- long len = fs.getFileStatus(f).getLen();
+ FileStatus status = fs.getFileStatus(f);
+ long len = status.getLen();
for(boolean done = false; !done; ) {
- BlockLocation[] locations = fs.getFileBlockLocations(f, 0, len);
+ BlockLocation[] locations = fs.getFileBlockLocations(status, 0, len);
int i = 0;
for(; i < locations.length &&
locations[i].getHosts().length == rep; i++)
Modified: hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java (original)
+++ hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java Tue Aug 12 15:25:22 2008
@@ -94,10 +94,6 @@
* for each map.
*/
static class RandomInputFormat implements InputFormat<Text, Text> {
-
- /** Accept all job confs */
- public void validateInput(JobConf job) throws IOException {
- }
/**
* Generate the requested number of file splits, with the filename
Modified: hadoop/core/trunk/src/examples/org/apache/hadoop/examples/SleepJob.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/SleepJob.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/examples/org/apache/hadoop/examples/SleepJob.java (original)
+++ hadoop/core/trunk/src/examples/org/apache/hadoop/examples/SleepJob.java Tue Aug 12 15:25:22 2008
@@ -66,7 +66,6 @@
public static class SleepInputFormat extends Configured
implements InputFormat<IntWritable,IntWritable> {
- public void validateInput(JobConf conf) { }
public InputSplit[] getSplits(JobConf conf, int numSplits) {
InputSplit[] ret = new InputSplit[numSplits];
for (int i = 0; i < numSplits; ++i) {
Modified: hadoop/core/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java (original)
+++ hadoop/core/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java Tue Aug 12 15:25:22 2008
@@ -173,9 +173,6 @@
return splits;
}
- public void validateInput(JobConf job) throws IOException {
- // NOTHING
- }
}
static long getNumberOfRows(JobConf job) {
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java Tue Aug 12 15:25:22 2008
@@ -181,26 +181,6 @@
LOG.info("Total input paths to process : " + result.size());
return result.toArray(new FileStatus[result.size()]);
}
-
- /** List input directories.
- * Subclasses may override to, e.g., select only files matching a regular
- * expression.
- *
- * @param job the job to list input paths for
- * @return array of Path objects
- * @throws IOException if zero items.
- * @deprecated Use {@link #listStatus(JobConf)} instead.
- */
- @Deprecated
- protected Path[] listPaths(JobConf job)
- throws IOException {
- return FileUtil.stat2Paths(listStatus(job));
- }
-
- @Deprecated
- public void validateInput(JobConf job) throws IOException {
- // handled by getSplits
- }
/** Splits files returned by {@link #listStatus(JobConf)} when
* they're too big.*/
@@ -209,17 +189,6 @@
throws IOException {
FileStatus[] files = listStatus(job);
- // Applications may have overridden listPaths so we need to check if
- // it returns a different set of paths to listStatus.
- // If it does we revert to the old behavior using Paths not FileStatus
- // objects.
- // When listPaths is removed, this check can be removed too.
- Path[] paths = listPaths(job);
- if (!Arrays.equals(paths, FileUtil.stat2Paths(files))) {
- LOG.warn("FileInputFormat#listPaths is deprecated, override listStatus " +
- "instead.");
- return getSplitsForPaths(job, numSplits, paths);
- }
long totalSize = 0; // compute total size
for (FileStatus file: files) { // check we have valid files
if (file.isDir()) {
@@ -265,57 +234,6 @@
LOG.debug("Total # of splits: " + splits.size());
return splits.toArray(new FileSplit[splits.size()]);
}
-
- @Deprecated
- private InputSplit[] getSplitsForPaths(JobConf job, int numSplits,
- Path[] files) throws IOException {
- long totalSize = 0; // compute total size
- for (int i = 0; i < files.length; i++) { // check we have valid files
- Path file = files[i];
- FileSystem fs = file.getFileSystem(job);
- if (fs.isDirectory(file) || !fs.exists(file)) {
- throw new IOException("Not a file: "+files[i]);
- }
- totalSize += fs.getLength(files[i]);
- }
-
- long goalSize = totalSize / (numSplits == 0 ? 1 : numSplits);
- long minSize = Math.max(job.getLong("mapred.min.split.size", 1),
- minSplitSize);
-
- // generate splits
- ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
- for (int i = 0; i < files.length; i++) {
- Path file = files[i];
- FileSystem fs = file.getFileSystem(job);
- long length = fs.getLength(file);
- BlockLocation[] blkLocations = fs.getFileBlockLocations(file, 0, length);
- if ((length != 0) && isSplitable(fs, file)) {
- long blockSize = fs.getBlockSize(file);
- long splitSize = computeSplitSize(goalSize, minSize, blockSize);
-
- long bytesRemaining = length;
- while (((double) bytesRemaining)/splitSize > SPLIT_SLOP) {
- int blkIndex = getBlockIndex(blkLocations, length-bytesRemaining);
- splits.add(new FileSplit(file, length-bytesRemaining, splitSize,
- blkLocations[blkIndex].getHosts()));
- bytesRemaining -= splitSize;
- }
-
- if (bytesRemaining != 0) {
- splits.add(new FileSplit(file, length-bytesRemaining, bytesRemaining,
- blkLocations[blkLocations.length-1].getHosts()));
- }
- } else if (length != 0) {
- splits.add(new FileSplit(file, 0, length, blkLocations[0].getHosts()));
- } else {
- //Create empty hosts array for zero length files
- splits.add(new FileSplit(file, 0, length, new String[0]));
- }
- }
- LOG.debug("Total # of splits: " + splits.size());
- return splits.toArray(new FileSplit[splits.size()]);
- }
protected long computeSplitSize(long goalSize, long minSize,
long blockSize) {
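
Subclasses that used to override listPaths must now override listStatus
instead, as the SequenceFileInputFormat hunk below does. A minimal sketch of
the new override point; the filtering rule here is made up purely for
illustration:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;

// Illustrative subclass: listStatus is now the single hook for
// selecting inputs, replacing the removed listPaths.
public class NonEmptyTextInputFormat extends TextInputFormat {
  @Override
  protected FileStatus[] listStatus(JobConf job) throws IOException {
    List<FileStatus> result = new ArrayList<FileStatus>();
    for (FileStatus file : super.listStatus(job)) {
      if (file.getLen() > 0) {        // made-up rule: skip empty files
        result.add(file);
      }
    }
    return result.toArray(new FileStatus[result.size()]);
  }
}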
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InputFormat.java Tue Aug 12 15:25:22 2008
@@ -64,21 +64,6 @@
*/
public interface InputFormat<K, V> {
- /**
- * Check for validity of the input-specification for the job.
- *
- * <p>This method is used to validate the input directories when a job is
- * submitted so that the {@link JobClient} can fail early, with an useful
- * error message, in case of errors. For e.g. input directory does not exist.
- * </p>
- *
- * @param job job configuration.
- * @throws InvalidInputException if the job does not have valid input
- * @deprecated getSplits is called in the client and can perform any
- * necessary validation of the input
- */
- void validateInput(JobConf job) throws IOException;
-
/**
* Logically split the set of input files for the job.
*
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobClient.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobClient.java Tue Aug 12 15:25:22 2008
@@ -732,18 +732,13 @@
configureCommandLineOptions(job, submitJobDir, submitJarFile);
Path submitJobFile = new Path(submitJobDir, "job.xml");
-
- // Check the input specification
- InputFormat inFormat = job.getInputFormat();
- inFormat.validateInput(job);
-
// Check the output specification
job.getOutputFormat().checkOutputSpecs(fs, job);
// Create the splits for the job
LOG.debug("Creating splits at " + fs.makeQualified(submitSplitFile));
InputSplit[] splits =
- inFormat.getSplits(job, job.getNumMapTasks());
+ job.getInputFormat().getSplits(job, job.getNumMapTasks());
// sort the splits into order based on size, so that the biggest
// go first
Arrays.sort(splits, new Comparator<InputSplit>() {
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MultiFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MultiFileInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MultiFileInputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MultiFileInputFormat.java Tue Aug 12 15:25:22 2008
@@ -23,6 +23,7 @@
import java.util.List;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
/**
@@ -41,7 +42,7 @@
public InputSplit[] getSplits(JobConf job, int numSplits)
throws IOException {
- Path[] paths = listPaths(job);
+ Path[] paths = FileUtil.stat2Paths(listStatus(job));
List<MultiFileSplit> splits = new
ArrayList<MultiFileSplit>(Math.min(numSplits, paths.length));
if (paths.length != 0) {
// HADOOP-1818: Manage splits only if there are paths
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFormat.java Tue Aug 12 15:25:22 2008
@@ -20,6 +20,7 @@
import java.io.IOException;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
@@ -31,15 +32,19 @@
public SequenceFileInputFormat() {
setMinSplitSize(SequenceFile.SYNC_INTERVAL);
}
-
- protected Path[] listPaths(JobConf job)
- throws IOException {
-
- Path[] files = super.listPaths(job);
+
+ @Override
+ protected FileStatus[] listStatus(JobConf job) throws IOException {
+ FileStatus[] files = super.listStatus(job);
for (int i = 0; i < files.length; i++) {
- Path file = files[i];
- if (file.getFileSystem(job).isDirectory(file)) { // it's a MapFile
- files[i] = new Path(file, MapFile.DATA_FILE_NAME); // use the data file
+ FileStatus file = files[i];
+ if (file.isDir()) { // it's a MapFile
+ files[i] = new FileStatus(file.getLen(), file.isDir(),
+ file.getReplication(), file.getBlockSize(),
+ file.getModificationTime(), file.getPermission(),
+ file.getOwner(), file.getGroup(),
+ // use the data file
+ new Path(file.getPath(), MapFile.DATA_FILE_NAME));
}
}
return files;
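
Because a FileStatus carries its path as a constructor argument rather than
via a setter, the hunk above rebuilds the status to redirect a MapFile
directory to the SequenceFile inside it. For orientation, a hedged sketch of
that layout (the directory name is hypothetical):

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.MapFile;

class MapFileDataPath {
  // A MapFile such as /maps/part-00000 is a directory holding an index
  // file plus a "data" SequenceFile; splits must target the data file,
  // not the directory itself.
  static Path dataFile(Path mapFileDir) {
    return new Path(mapFileDir, MapFile.DATA_FILE_NAME);  // ".../data"
  }
}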
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/CompositeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/CompositeInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/CompositeInputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/CompositeInputFormat.java Tue Aug 12 15:25:22 2008
@@ -108,15 +108,6 @@
}
/**
- * Verify that this composite has children and that all its children
- * can validate their input.
- */
- public void validateInput(JobConf job) throws IOException {
- setFormat(job);
- root.validateInput(job);
- }
-
- /**
* Build a CompositeInputSplit from the child InputFormats by assigning the
* ith split from each child to the ith composite split.
*/
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/Parser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/Parser.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/Parser.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/join/Parser.java Tue Aug 12 15:25:22 2008
@@ -296,10 +296,6 @@
return conf;
}
- public void validateInput(JobConf job) throws IOException {
- inf.validateInput(getConf(job));
- }
-
public InputSplit[] getSplits(JobConf job, int numSplits)
throws IOException {
return inf.getSplits(getConf(job), numSplits);
@@ -355,15 +351,6 @@
}
}
- public void validateInput(JobConf job) throws IOException {
- if (0 == kids.size()) {
- throw new IOException("Childless composite");
- }
- for (Node n : kids) {
- n.validateInput(job);
- }
- }
-
/**
* Combine InputSplits from child InputFormats into a
* {@link CompositeInputSplit}.
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/DelegatingInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/DelegatingInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/DelegatingInputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/DelegatingInputFormat.java Tue Aug 12 15:25:22 2008
@@ -44,18 +44,6 @@
*/
public class DelegatingInputFormat<K, V> implements InputFormat<K, V> {
- @Deprecated
- public void validateInput(JobConf conf) throws IOException {
- JobConf confCopy = new JobConf(conf);
- Map<Path, InputFormat> formatMap = MultipleInputs.getInputFormatMap(conf);
- for (Entry<Path, InputFormat> entry : formatMap.entrySet()) {
- Path path = entry.getKey();
- InputFormat format = entry.getValue();
- FileInputFormat.setInputPaths(confCopy, path);
- format.validateInput(confCopy);
- }
- }
-
public InputSplit[] getSplits(JobConf conf, int numSplits) throws IOException {
JobConf confCopy = new JobConf(conf);
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/NLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/NLineInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/NLineInputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/lib/NLineInputFormat.java Tue Aug 12 15:25:22 2008
@@ -78,14 +78,12 @@
public InputSplit[] getSplits(JobConf job, int numSplits)
throws IOException {
ArrayList<FileSplit> splits = new ArrayList<FileSplit>();
- Path[] files = listPaths(job);
- for (int i=0; i < files.length; i++) {
- Path fileName = files[i];
- FileSystem fs = fileName.getFileSystem(job);
- FileStatus status = fs.getFileStatus(fileName);
- if (status.isDir() || !fs.exists(fileName)) {
+ for (FileStatus status : listStatus(job)) {
+ Path fileName = status.getPath();
+ if (status.isDir()) {
throw new IOException("Not a file: " + fileName);
}
+ FileSystem fs = fileName.getFileSystem(job);
LineReader lr = null;
try {
FSDataInputStream in = fs.open(fileName);
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java Tue Aug 12 15:25:22 2008
@@ -105,7 +105,8 @@
}
public String[][] getDataLocation(String path, long start, long len) throws IOException {
BlockLocation[] blkLocations =
- localFS.getFileBlockLocations(new Path(path), start, len);
+ localFS.getFileBlockLocations(localFS.getFileStatus(new Path(path)),
+ start, len);
if ((blkLocations == null) || (blkLocations.length == 0)) {
return new String[0][];
}
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/DFSTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/DFSTestUtil.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/DFSTestUtil.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/DFSTestUtil.java Tue Aug 12 15:25:22 2008
@@ -206,8 +206,8 @@
boolean good;
do {
good = true;
- BlockLocation locs[] = fs.getFileBlockLocations(fileName, 0,
- Long.MAX_VALUE);
+ BlockLocation locs[] = fs.getFileBlockLocations(
+ fs.getFileStatus(fileName), 0, Long.MAX_VALUE);
for (int j = 0; j < locs.length; j++) {
String[] loc = locs[j].getHosts();
if (loc.length != replFactor) {
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDatanodeDeath.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDatanodeDeath.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDatanodeDeath.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDatanodeDeath.java Tue Aug 12 15:25:22 2008
@@ -143,33 +143,6 @@
stm.write(buffer, mid, fileSize - mid);
}
-
- // wait till this block is confirmed by the datanodes.
- private void waitBlockConfirmation(FileSystem fileSys, Path name,
- int repl, int blockNumber)
- throws IOException {
- boolean done = false;
- long start = blockSize * blockNumber;
- long end = blockSize * (blockNumber + 1) -1;
-
- while (!done) {
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {}
- done = true;
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, start,
- end);
- if (locations.length < 1) {
- done = false;
- continue;
- }
- if (locations[0].getHosts().length < repl) {
- done = false;
- continue;
- }
- }
- }
-
/**
* For blocks that reside on the nodes that are down, verify that their
* replication factor is 1 more than the specified one.
@@ -224,8 +197,8 @@
Thread.sleep(1000);
} catch (InterruptedException e) {}
done = true;
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0,
- filesize);
+ BlockLocation[] locations = fileSys.getFileBlockLocations(
+ fileSys.getFileStatus(name), 0, filesize);
if (locations.length < numblocks) {
if (attempt > 100) {
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDecommission.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDecommission.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDecommission.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDecommission.java Tue Aug 12 15:25:22 2008
@@ -89,27 +89,13 @@
private void checkFile(FileSystem fileSys, Path name, int repl)
throws IOException {
- boolean done = false;
- while (!done) {
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {}
- done = true;
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0,
- fileSize);
- for (int idx = 0; idx < locations.length; idx++) {
- if (locations[idx].getHosts().length < repl) {
- done = false;
- break;
- }
- }
- }
+ DFSTestUtil.waitReplication(fileSys, name, (short) repl);
}
private void printFileLocations(FileSystem fileSys, Path name)
throws IOException {
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0,
- fileSize);
+ BlockLocation[] locations = fileSys.getFileBlockLocations(
+ fileSys.getFileStatus(name), 0, fileSize);
for (int idx = 0; idx < locations.length; idx++) {
String[] loc = locations[idx].getHosts();
System.out.print("Block[" + idx + "] : ");
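
This and the TestFileStatus and TestRackAwareTaskPlacement hunks replace
hand-rolled sleep-and-recheck loops with DFSTestUtil.waitReplication, which
blocks until every block of the file reports the requested number of replicas.
A sketch of the resulting pattern; the surrounding MiniDFSCluster setup is
assumed, not shown:

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;

class ReplicationCheck {
  // One line replaces the old poll loop: block until the file is
  // replicated, then let the caller assert on block locations.
  static void checkFile(FileSystem fileSys, Path name, int repl)
      throws Exception {
    DFSTestUtil.waitReplication(fileSys, name, (short) repl);
  }
}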
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileAppend.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileAppend.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileAppend.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileAppend.java Tue Aug 12 15:25:22 2008
@@ -92,8 +92,8 @@
Thread.sleep(1000);
} catch (InterruptedException e) {}
done = true;
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0,
- fileSize);
+ BlockLocation[] locations = fileSys.getFileBlockLocations(
+ fileSys.getFileStatus(name), 0, fileSize);
if (locations.length < numBlocks) {
System.out.println("Number of blocks found " + locations.length);
done = false;
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileCreation.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileCreation.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileCreation.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileCreation.java Tue Aug 12 15:25:22 2008
@@ -105,8 +105,8 @@
Thread.sleep(1000);
} catch (InterruptedException e) {}
done = true;
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0,
- fileSize);
+ BlockLocation[] locations = fileSys.getFileBlockLocations(
+ fileSys.getFileStatus(name), 0, fileSize);
if (locations.length < numBlocks) {
done = false;
continue;
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileStatus.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileStatus.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileStatus.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestFileStatus.java Tue Aug 12 15:25:22 2008
@@ -58,21 +58,7 @@
private void checkFile(FileSystem fileSys, Path name, int repl)
throws IOException {
- boolean done = false;
- while (!done) {
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {}
- done = true;
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0,
- fileSize);
- for (int idx = 0; idx < locations.length; idx++) {
- if (locations[idx].getHosts().length < repl) {
- done = false;
- break;
- }
- }
- }
+ DFSTestUtil.waitReplication(fileSys, name, (short) repl);
}
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSetrepIncreasing.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSetrepIncreasing.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSetrepIncreasing.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSetrepIncreasing.java Tue Aug 12 15:25:22 2008
@@ -56,8 +56,9 @@
//get fs again since the old one may be closed
fs = cluster.getFileSystem();
- long len = fs.getFileStatus(f).getLen();
- for(BlockLocation locations : fs.getFileBlockLocations(f, 0, len)) {
+ FileStatus file = fs.getFileStatus(f);
+ long len = file.getLen();
+ for(BlockLocation locations : fs.getFileBlockLocations(file, 0, len)) {
assertTrue(locations.getHosts().length == toREP);
}
TestDFSShell.show("done setrep waiting: " + root);
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSmallBlock.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSmallBlock.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSmallBlock.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestSmallBlock.java Tue Aug 12 15:25:22 2008
@@ -60,8 +60,8 @@
}
private void checkFile(FileSystem fileSys, Path name) throws IOException {
- BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0,
- fileSize);
+ BlockLocation[] locations = fileSys.getFileBlockLocations(
+ fileSys.getFileStatus(name), 0, fileSize);
assertEquals("Number of blocks", fileSize, locations.length);
FSDataInputStream stm = fileSys.open(name);
byte[] expected = new byte[fileSize];
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/EmptyInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/EmptyInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/EmptyInputFormat.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/EmptyInputFormat.java Tue Aug 12 15:25:22 2008
@@ -27,8 +27,6 @@
*/
public class EmptyInputFormat<K, V> implements InputFormat<K, V> {
- public void validateInput(JobConf job) { }
-
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
return new InputSplit[0];
}
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java Tue Aug 12 15:25:22 2008
@@ -192,8 +192,6 @@
static class RandomInputFormat implements InputFormat {
- public void validateInput(JobConf conf) { }
-
public InputSplit[] getSplits(JobConf conf, int numSplits) {
InputSplit[] splits = new InputSplit[numSplits];
for (int i = 0; i < numSplits; ++i) {
@@ -376,12 +374,6 @@
* reading input from arbitrary locations ("indirect" reads).
*/
static class IndirectInputFormat implements InputFormat {
- public void validateInput(JobConf job) throws IOException {
- InputFormat indirIF = (InputFormat)ReflectionUtils.newInstance(
- job.getClass("mapred.indirect.input.format",
- SequenceFileInputFormat.class), job);
- indirIF.validateInput(job);
- }
static class IndirectSplit implements InputSplit {
Path file;
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormat.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormat.java Tue Aug 12 15:25:22 2008
@@ -19,12 +19,13 @@
import java.io.DataOutputStream;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
+import junit.framework.TestCase;
+
import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-
-import junit.framework.TestCase;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
public class TestFileInputFormat extends TestCase {
@@ -54,8 +55,9 @@
TextInputFormat inFormat = new TextInputFormat();
inFormat.configure(conf);
InputSplit[] splits = inFormat.getSplits(conf, 1);
+ FileStatus fileStatus = fs.getFileStatus(path);
BlockLocation[] locations =
- fs.getFileBlockLocations(path, 0, fs.getFileStatus(path).getLen());
+ fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
System.out.println("Made splits");
// make sure that each split is a block and the locations match
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java Tue Aug 12 15:25:22 2008
@@ -18,6 +18,8 @@
package org.apache.hadoop.mapred;
import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
@@ -37,10 +39,6 @@
return null;
}
- public Path[] listPaths(JobConf job) throws IOException {
- return super.listPaths(job);
- }
-
}
private static FileSystem localFs = null;
@@ -110,10 +108,9 @@
DummyFileInputFormat inputFormat =
(DummyFileInputFormat) conf.getInputFormat();
- Path[] listPaths = inputFormat.listPaths(conf);
Set<Path> computedFiles = new HashSet<Path>();
- for (Path path : listPaths) {
- computedFiles.add(path);
+ for (FileStatus file : inputFormat.listStatus(conf)) {
+ computedFiles.add(file.getPath());
}
createdFiles.remove(localFs.makeQualified(new Path(workDir, "_hello")));
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMapCollection.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMapCollection.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMapCollection.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMapCollection.java Tue Aug 12 15:25:22 2008
@@ -213,8 +213,6 @@
public FakeIF() { }
- public void validateInput(JobConf conf) { }
-
public InputSplit[] getSplits(JobConf conf, int splits) {
return new InputSplit[] { new FakeSplit() };
}
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java Tue Aug 12 15:25:22 2008
@@ -203,9 +203,6 @@
}
}
- public void validateInput(JobConf job) throws IOException {
- }
-
public InputSplit[] getSplits(JobConf job,
int numSplits) throws IOException {
return new MySplit[]{new MySplit(0, 1), new MySplit(1, 3),
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java Tue Aug 12 15:25:22 2008
@@ -25,6 +25,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.io.BytesWritable;
@@ -175,27 +176,7 @@
writer.append(new BytesWritable(), new BytesWritable());
writer.close();
fileSys.setReplication(name, replication);
- waitForReplication(fileSys, namenode, name, replication);
- }
- static void waitForReplication(FileSystem fileSys, NameNode namenode,
- Path name, short replication) throws IOException {
- //wait for the replication to happen
- boolean isReplicationDone;
-
- do {
- BlockLocation[] hints = fileSys.getFileBlockLocations(name, 0,
- Long.MAX_VALUE);
- if (hints[0].getHosts().length == replication) {
- isReplicationDone = true;
- } else {
- isReplicationDone = false;
- }
- try {
- Thread.sleep(1000);
- } catch (InterruptedException ie) {
- return;
- }
- } while(!isReplicationDone);
+ DFSTestUtil.waitReplication(fileSys, name, replication);
}
static RunningJob launchJob(JobConf jobConf, Path inDir, Path outputPath,
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java Tue Aug 12 15:25:22 2008
@@ -72,9 +72,6 @@
*/
public static class RandomInputFormat implements InputFormat<Text, Text> {
- public void validateInput(JobConf job) throws IOException {
- }
-
public InputSplit[] getSplits(JobConf job,
int numSplits) throws IOException {
InputSplit[] result = new InputSplit[numSplits];
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/ConfigurableInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/ConfigurableInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/ConfigurableInputFormat.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/ConfigurableInputFormat.java Tue Aug 12 15:25:22 2008
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred.join;
-
-import java.io.IOException;
-
-import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobConfigurable;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
-
-public class ConfigurableInputFormat implements InputFormat, JobConfigurable {
- boolean configured = false;
-
- public ConfigurableInputFormat() { }
-
- public void configure(JobConf job) {
- configured = true;
- }
- public void validateInput(JobConf job) throws IOException {
- if (!configured)
- throw new IOException("Failed to configure child InputFormat");
- }
-
- public InputSplit[] getSplits(JobConf job, int numSplits)
- throws IOException {
- return null;
- }
-
- public RecordReader getRecordReader(
- InputSplit split, JobConf job, Reporter reporter) throws IOException {
- return null;
- }
-}
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/FakeIF.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/FakeIF.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/FakeIF.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/FakeIF.java Tue Aug 12 15:25:22 2008
@@ -63,8 +63,6 @@
public FakeIF() { }
- public void validateInput(JobConf conf) { }
-
public InputSplit[] getSplits(JobConf conf, int splits) {
return new InputSplit[] { new FakeSplit() };
}
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/TestDatamerge.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/TestDatamerge.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/TestDatamerge.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/join/TestDatamerge.java Tue Aug 12 15:25:22 2008
@@ -340,14 +340,6 @@
}
- public void testConfiguredInputFormat() throws Exception {
- JobConf conf = new JobConf();
- conf.set("mapred.join.expr", CompositeInputFormat.compose(
- ConfigurableInputFormat.class, "/dingos"));
- CompositeInputFormat cif = new CompositeInputFormat();
- cif.validateInput(conf);
- }
-
public void testEmptyJoin() throws Exception {
JobConf job = new JobConf();
Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java Tue Aug 12 15:25:22 2008
@@ -60,7 +60,6 @@
}
return result.toArray(new InputSplit[result.size()]);
}
- public void validateInput(JobConf conf) { }
public RecordReader<IntWritable, Text> getRecordReader(InputSplit split,
JobConf conf,
Reporter reporter) {
Modified: hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java Tue Aug 12 15:25:22 2008
@@ -206,11 +206,6 @@
static class CopyInputFormat implements InputFormat<Text, Text> {
/**
- * Does nothing.
- */
- public void validateInput(JobConf job) throws IOException { }
-
- /**
* Produce splits such that each is no greater than the quotient of the
* total size and the number of splits requested.
* @param job The handle to the JobConf object
Modified: hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java?rev=685348&r1=685347&r2=685348&view=diff
==============================================================================
--- hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java Tue Aug 12 15:25:22 2008
@@ -144,7 +144,6 @@
*/
static class HArchiveInputFormat implements InputFormat<LongWritable, Text> {
- public void validateInput(JobConf jconf) throws IOException{};
//generate input splits from the src file lists
public InputSplit[] getSplits(JobConf jconf, int numSplits)