瀏覽代碼

Merge -r 634562:634563 from trunk to branch-0.16 to fix HADOOP-2958

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.16@634565 13f79535-47bb-0310-9956-ffa450edef68
Arun Murthy 17 年之前
父節點
當前提交
b4f20d6e0b

+ 6 - 0
CHANGES.txt

@@ -126,6 +126,12 @@ Release 0.16.1 - Unreleased
     HADOOP-2756. NPE in DFSClient while closing DFSOutputStreams 
     under load. (rangadi)
 
+    HADOOP-2958. Fixed FileBench which broke due to HADOOP-2391 which performs
+    a check for existence of the output directory and a trivial bug in
+    GenericMRLoadGenerator where min/max word lengths were identical since
+    they were looking at the same config variables (Chris Douglas via
+    acmurthy) 
+
 Release 0.16.0 - 2008-02-07
 
   INCOMPATIBLE CHANGES

+ 16 - 11
src/test/org/apache/hadoop/io/FileBench.java

@@ -111,10 +111,11 @@ public class FileBench extends Configured implements Tool {
     Text key = new Text();
     Text val = new Text();
 
+    final String fn = conf.get("test.filebench.name", "");
+    final Path outd = conf.getOutputPath();
     OutputFormat outf = conf.getOutputFormat();
-    Path out = conf.getOutputPath();
     RecordWriter<Text,Text> rw =
-      outf.getRecordWriter(out.getFileSystem(conf), conf, out.toString(),
+      outf.getRecordWriter(outd.getFileSystem(conf), conf, fn,
                            Reporter.NULL);
     try {
       long acc = 0L;
@@ -137,7 +138,8 @@ public class FileBench extends Configured implements Tool {
   @SuppressWarnings("unchecked") // InputFormat instantiation
   static long readBench(JobConf conf) throws IOException {
     InputFormat inf = conf.getInputFormat();
-    Path pin = conf.getInputPaths()[0];
+    final String fn = conf.get("test.filebench.name", "");
+    Path pin = new Path(conf.getInputPaths()[0], fn);
     FileStatus in = pin.getFileSystem(conf).getFileStatus(pin);
     RecordReader rr = inf.getRecordReader(
         new FileSplit(pin, 0, in.getLen(), conf), conf, Reporter.NULL);
@@ -173,6 +175,7 @@ public class FileBench extends Configured implements Tool {
       try {
         if ("-dir".equals(argv[i])) {
           root = new Path(argv[++i]).makeQualified(fs);
+          System.out.println("DIR: " + root.toString());
         } else if ("-seed".equals(argv[i])) {
           job.setLong("filebench.seed", Long.valueOf(argv[++i]));
         } else if (argv[i].startsWith("-no")) {
@@ -199,6 +202,8 @@ public class FileBench extends Configured implements Tool {
     fillBlocks(job);
     job.setOutputKeyClass(Text.class);
     job.setOutputValueClass(Text.class);
+    job.setInputPath(root);
+    job.setOutputPath(root);
 
     if (null == cc) cc = EnumSet.allOf(CCodec.class);
     if (null == ct) ct = EnumSet.allOf(CType.class);
@@ -217,7 +222,7 @@ public class FileBench extends Configured implements Tool {
                 typ.name().toUpperCase();
               typ.configure(job);
               System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
-              System.out.println(rwop.exec(new Path(root, fn), job) / 1000 +
+              System.out.println(rwop.exec(fn, job) / 1000 +
                   " seconds");
             }
           } else {
@@ -226,10 +231,10 @@ public class FileBench extends Configured implements Tool {
               cod.name().toUpperCase();
             Path p = new Path(root, fn);
             if (rwop == RW.r && !fs.exists(p)) {
-              p = new Path(root, fn + cod.getExt());
+              fn += cod.getExt();
             }
             System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
-            System.out.println(rwop.exec(p, job) / 1000 +
+            System.out.println(rwop.exec(fn, job) / 1000 +
                 " seconds");
           }
         }
@@ -287,20 +292,20 @@ public class FileBench extends Configured implements Tool {
   }
   enum RW {
     w() {
-      public long exec(Path p, JobConf job) throws IOException {
-        job.setOutputPath(p);
+      public long exec(String fn, JobConf job) throws IOException {
+        job.set("test.filebench.name", fn);
         return writeBench(job);
       }
     },
 
     r() {
-      public long exec(Path p, JobConf job) throws IOException {
-        job.setInputPath(p);
+      public long exec(String fn, JobConf job) throws IOException {
+        job.set("test.filebench.name", fn);
         return readBench(job);
       }
     };
 
-    public abstract long exec(Path p, JobConf job) throws IOException;
+    public abstract long exec(String fn, JobConf job) throws IOException;
   }
   static Map<Class<? extends Enum>, Map<String,? extends Enum>> fullmap
     = new HashMap<Class<? extends Enum>, Map<String,? extends Enum>>();

+ 2 - 2
src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java

@@ -269,9 +269,9 @@ public class GenericMRLoadGenerator extends Configured implements Tool {
       bytesToWrite = job.getLong("test.randomtextwrite.bytes_per_map",
                                     1*1024*1024*1024);
       keymin = job.getInt("test.randomtextwrite.min_words_key", 5);
-      keymax = job.getInt("test.randomtextwrite.min_words_key", 10);
+      keymax = job.getInt("test.randomtextwrite.max_words_key", 10);
       valmin = job.getInt("test.randomtextwrite.min_words_value", 5);
-      valmax = job.getInt("test.randomtextwrite.min_words_value", 10);
+      valmax = job.getInt("test.randomtextwrite.max_words_value", 10);
     }
 
     public void map(Text key, Text val, OutputCollector<Text,Text> output,