
MAPREDUCE-3319. Hadoop example multifilewc broken in 0.20.205.0. Contributed by Subroto Sanyal.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1.0@1214666 13f79535-47bb-0310-9956-ffa450edef68
Matthew Foley 13 years ago
parent
commit
fa9df7589b
2 changed files with 8 additions and 5 deletions
  1. CHANGES.txt  (+3 -0)
  2. src/examples/org/apache/hadoop/examples/MultiFileWordCount.java  (+5 -5)
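
Why this broke: in the old ("mapred") API, the example's driver registers org.apache.hadoop.mapred.lib.LongSumReducer as both combiner and reducer, and that reducer sums LongWritable values. The mapper, however, emitted IntWritable counts, so running multifilewc failed at runtime with a type mismatch between the map output values and what the reducer consumes. For reference, LongSumReducer's contract is roughly the following (a from-memory sketch, not a verbatim copy of the Hadoop source):

    public class LongSumReducer<K> extends MapReduceBase
        implements Reducer<K, LongWritable, K, LongWritable> {

      public void reduce(K key, Iterator<LongWritable> values,
                         OutputCollector<K, LongWritable> output,
                         Reporter reporter) throws IOException {
        // Sum all LongWritable values for the key and emit one total.
        long sum = 0;
        while (values.hasNext()) {
          sum += values.next().get();
        }
        output.collect(key, new LongWritable(sum));
      }
    }

Switching the mapper's value type to LongWritable, as the diff below does, makes the map output compatible with this reducer.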

+ 3 - 0
CHANGES.txt

@@ -61,6 +61,9 @@ Release 1.0.0 - 2011.11.27
 
   BUG FIXES
 
+    MAPREDUCE-3319. Hadoop example "multifilewc" broken in 0.20.205.0.
+    (Subroto Sanyal via mattf)
+
     HDFS-2589. Remove unnecessary hftp token fetch and renewal thread.
     (Daryn Sharp via mattf)
 

+ 5 - 5
src/examples/org/apache/hadoop/examples/MultiFileWordCount.java

@@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -205,13 +205,13 @@ public class MultiFileWordCount extends Configured implements Tool {
    * This Mapper is similar to the one in {@link WordCount.MapClass}.
    */
   public static class MapClass extends MapReduceBase
-    implements Mapper<WordOffset, Text, Text, IntWritable> {
+    implements Mapper<WordOffset, Text, Text, LongWritable> {
 
-    private final static IntWritable one = new IntWritable(1);
+    private final static LongWritable one = new LongWritable(1);
     private Text word = new Text();
     
     public void map(WordOffset key, Text value,
-        OutputCollector<Text, IntWritable> output, Reporter reporter)
+        OutputCollector<Text, LongWritable> output, Reporter reporter)
         throws IOException {
       
       String line = value.toString();
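
Put together, the fixed map() method emits each token with a LongWritable count of 1. The tokenizer loop shown here is the untouched remainder of the method, reconstructed as a sketch rather than quoted from the commit:

    public void map(WordOffset key, Text value,
        OutputCollector<Text, LongWritable> output, Reporter reporter)
        throws IOException {

      String line = value.toString();
      StringTokenizer itr = new StringTokenizer(line);
      while (itr.hasMoreTokens()) {
        // Emit (word, 1L); LongSumReducer later sums these longs per word.
        word.set(itr.nextToken());
        output.collect(word, one);
      }
    }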
@@ -244,7 +244,7 @@ public class MultiFileWordCount extends Configured implements Tool {
     // the keys are words (strings)
     job.setOutputKeyClass(Text.class);
     // the values are counts (ints)
-    job.setOutputValueClass(IntWritable.class);
+    job.setOutputValueClass(LongWritable.class);
 
     //use the defined mapper
     job.setMapperClass(MapClass.class);
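
A few lines below this hunk, the same run() method wires in the summing reducer, which is what makes LongWritable the required output value class (assumed from the branch-1 example source; these lines are not part of this diff):

    // LongSumReducer consumes and produces LongWritable values, so the
    // map output value class set above must be LongWritable to match.
    job.setCombinerClass(LongSumReducer.class);
    job.setReducerClass(LongSumReducer.class);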