
HADOOP-935. Fix contrib/abacus to not delete pre-existing output files, but rather to fail in this case. Contributed by Runping.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@500391 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting, 18 years ago
Commit 19fb5886fe
2 changed files with 9 additions and 8 deletions
  1. CHANGES.txt (+3, -0)
  2. src/contrib/abacus/src/java/org/apache/hadoop/abacus/ValueAggregatorJob.java (+6, -8)

CHANGES.txt (+3, -0)

@@ -73,6 +73,9 @@ Trunk (unreleased changes)
 22. HADOOP-929.  Fix PhasedFileSystem to pass configuration to
     underlying FileSystem.  (Sanjay Dahiya via cutting)
 
+23. HADOOP-935.  Fix contrib/abacus to not delete pre-existing output
+    files, but rather to fail in this case.  (Runping Qi via cutting)
+
 
 Release 0.10.1 - 2007-01-10
 

src/contrib/abacus/src/java/org/apache/hadoop/abacus/ValueAggregatorJob.java (+6, -8)

@@ -100,6 +100,10 @@ public class ValueAggregatorJob {
   public static JobConf createValueAggregatorJob(String args[])
       throws IOException {
 
+    if (args.length < 2) {
+      System.out.println("usage: inputDirs outDir [numOfReducer [textinputformat|seq [specfile]]]");
+      System.exit(1);
+    }
     String inputDir = args[0];
     String outputDir = args[1];
     int numOfReducers = 1;
@@ -127,17 +131,11 @@ public class ValueAggregatorJob {
 
     String[] inputDirsSpecs = inputDir.split(",");
     for (int i = 0; i < inputDirsSpecs.length; i++) {
-      String spec = inputDirsSpecs[i];
-      Path[] dirs = fs.globPaths(new Path(spec));
-      for (int j = 0; j < dirs.length; j++) {
-        System.out.println("Adding dir: " + dirs[j].toString());
-        theJob.addInputPath(dirs[j]);
-      }
+      theJob.addInputPath(new Path(inputDirsSpecs[i]));
     }
 
     theJob.setInputFormat(theInputFormat);
-    fs.delete(new Path(outputDir));
-
+    
     theJob.setMapperClass(ValueAggregatorMapper.class);
     theJob.setOutputPath(new Path(outputDir));
     theJob.setOutputFormat(TextOutputFormat.class);
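For context, here is a minimal caller sketch against the 0.10-era Hadoop API this file uses. The class name AggregatorDriver and the explicit pre-flight check are illustrative, not part of this commit: after the change above, createValueAggregatorJob() no longer deletes a pre-existing output directory, and per the commit message the job fails in that case, so a caller that still wants the old clobbering behavior must delete the output path itself before submitting.

import java.io.IOException;

import org.apache.hadoop.abacus.ValueAggregatorJob;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical driver illustrating the behavior change in this commit.
public class AggregatorDriver {
  public static void main(String[] args) throws IOException {
    // args: inputDirs outDir [numOfReducer [textinputformat|seq [specfile]]]
    JobConf job = ValueAggregatorJob.createValueAggregatorJob(args);

    // The library no longer deletes outDir, so check it explicitly and
    // fail loudly up front; call fs.delete(outDir) here instead only if
    // overwriting existing output is really what you want.
    FileSystem fs = FileSystem.get(job);
    Path outDir = new Path(args[1]);
    if (fs.exists(outDir)) {
      throw new IOException("Output directory " + outDir + " already exists");
    }

    JobClient.runJob(job);
  }
}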