Author: cutting
Date: Fri Jan 26 14:40:45 2007
New Revision: 500391

URL: http://svn.apache.org/viewvc?view=rev&rev=500391
Log:
HADOOP-935.  Fix contrib/abacus to not delete pre-existing output files, but 
rather to fail in this case.  Contributed by Runping.
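
With the automatic delete gone, callers that relied on the old overwrite
behaviour now have to deal with a pre-existing output directory themselves,
either by deleting it up front or by failing with a clear message.  A minimal
driver sketch of the latter (the AbacusDriver class name and the explicit
exists() check are illustrative assumptions, not part of this commit):

import java.io.IOException;

import org.apache.hadoop.abacus.ValueAggregatorJob;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class AbacusDriver {
  public static void main(String[] args) throws IOException {
    // Build the aggregation job; args follow the usage string introduced
    // below: inputDirs outDir [numOfReducer [textinputformat|seq [specfile]]]
    JobConf job = ValueAggregatorJob.createValueAggregatorJob(args);

    // createValueAggregatorJob() no longer deletes an existing output
    // directory, so refuse to run rather than clobber earlier results.
    Path outputDir = new Path(args[1]);
    FileSystem fs = FileSystem.get(job);
    if (fs.exists(outputDir)) {
      System.err.println("Output directory " + outputDir + " already exists");
      System.exit(1);
    }

    JobClient.runJob(job);
  }
}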

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/abacus/src/java/org/apache/hadoop/abacus/ValueAggregatorJob.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=500391&r1=500390&r2=500391
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Fri Jan 26 14:40:45 2007
@@ -73,6 +73,9 @@
 22. HADOOP-929.  Fix PhasedFileSystem to pass configuration to
     underlying FileSystem.  (Sanjay Dahiya via cutting)
 
+23. HADOOP-935.  Fix contrib/abacus to not delete pre-existing output
+    files, but rather to fail in this case.  (Runping Qi via cutting)
+
 
 Release 0.10.1 - 2007-01-10
 

Modified: lucene/hadoop/trunk/src/contrib/abacus/src/java/org/apache/hadoop/abacus/ValueAggregatorJob.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/abacus/src/java/org/apache/hadoop/abacus/ValueAggregatorJob.java?view=diff&rev=500391&r1=500390&r2=500391
==============================================================================
--- lucene/hadoop/trunk/src/contrib/abacus/src/java/org/apache/hadoop/abacus/ValueAggregatorJob.java (original)
+++ lucene/hadoop/trunk/src/contrib/abacus/src/java/org/apache/hadoop/abacus/ValueAggregatorJob.java Fri Jan 26 14:40:45 2007
@@ -100,6 +100,10 @@
   public static JobConf createValueAggregatorJob(String args[])
       throws IOException {
 
+    if (args.length < 2) {
+      System.out.println("usage: inputDirs outDir [numOfReducer [textinputformat|seq [specfile]]]");
+      System.exit(1);
+    }
     String inputDir = args[0];
     String outputDir = args[1];
     int numOfReducers = 1;
@@ -127,17 +131,11 @@
 
     String[] inputDirsSpecs = inputDir.split(",");
     for (int i = 0; i < inputDirsSpecs.length; i++) {
-      String spec = inputDirsSpecs[i];
-      Path[] dirs = fs.globPaths(new Path(spec));
-      for (int j = 0; j < dirs.length; j++) {
-        System.out.println("Adding dir: " + dirs[j].toString());
-        theJob.addInputPath(dirs[j]);
-      }
+      theJob.addInputPath(new Path(inputDirsSpecs[i]));
     }
 
     theJob.setInputFormat(theInputFormat);
-    fs.delete(new Path(outputDir));
-
+    
     theJob.setMapperClass(ValueAggregatorMapper.class);
     theJob.setOutputPath(new Path(outputDir));
     theJob.setOutputFormat(TextOutputFormat.class);
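
If a caller still wants the eager glob expansion that the second hunk of this
diff removes, it can be reproduced on the caller's side before the job is
submitted.  A rough sketch reusing the same FileSystem.globPaths() call as the
deleted loop (the InputGlobExpander class name is an illustrative assumption):

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public class InputGlobExpander {
  // Expand each comma-separated input spec with globPaths(), mirroring the
  // loop removed above, and add every matching directory to the job.
  public static void addExpandedInputPaths(JobConf job, String inputDir)
      throws IOException {
    FileSystem fs = FileSystem.get(job);
    String[] inputDirsSpecs = inputDir.split(",");
    for (int i = 0; i < inputDirsSpecs.length; i++) {
      Path[] dirs = fs.globPaths(new Path(inputDirsSpecs[i]));
      for (int j = 0; j < dirs.length; j++) {
        System.out.println("Adding dir: " + dirs[j].toString());
        job.addInputPath(dirs[j]);
      }
    }
  }
}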

