MAPREDUCE-1407. Update javadoc in mapreduce.{Mapper,Reducer} to match
actual usage. Contributed by Benoit Sigoure


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20@925523 13f79535-47bb-0310-9956-ffa450edef68

commit c9b32ad7be
Author: Christopher Douglas

CHANGES.txt  +3 -0

@@ -8,6 +8,9 @@ Release 0.20.3 - Unreleased
     HADOOP-6382. Add support for publishing Hadoop jars to Apache Maven
     repository. (Giridharan Kesavan via cdouglas)
 
+    MAPREDUCE-1407. Update javadoc in mapreduce.{Mapper,Reducer} to match
+    actual usage. (Benoit Sigoure via cdouglas)
+
 Release 0.20.2 - 2010-2-19
 
   NEW FEATURES

src/mapred/org/apache/hadoop/mapreduce/Mapper.java  +4 -4

@@ -68,16 +68,16 @@ import org.apache.hadoop.io.compress.CompressionCodec;
  * <p>Example:</p>
  * <p><blockquote><pre>
  * public class TokenCounterMapper 
- *     extends Mapper<Object, Text, Text, IntWritable>{
+ *     extends Mapper&lt;Object, Text, Text, IntWritable&gt;{
  *    
  *   private final static IntWritable one = new IntWritable(1);
  *   private Text word = new Text();
  *   
- *   public void map(Object key, Text value, Context context) throws IOException {
+ *   public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
  *     StringTokenizer itr = new StringTokenizer(value.toString());
  *     while (itr.hasMoreTokens()) {
  *       word.set(itr.nextToken());
- *       context.collect(word, one);
+ *       context.write(word, one);
  *     }
  *   }
  * }
@@ -145,4 +145,4 @@ public class Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {
     }
     cleanup(context);
   }
-}
+}
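
For reference, the corrected Mapper example reads as follows when expanded into
a standalone class. This is an illustrative sketch: the imports and surrounding
framing are added here and are not part of the patch, but the class body matches
the updated javadoc.

    import java.io.IOException;
    import java.util.StringTokenizer;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // Tokenizes each input value and emits a (token, 1) pair per token.
    // Both corrections from this patch are visible here: map() declares
    // InterruptedException alongside IOException, and records are emitted
    // with context.write() rather than context.collect().
    public class TokenCounterMapper
        extends Mapper<Object, Text, Text, IntWritable> {

      private final static IntWritable one = new IntWritable(1);
      private Text word = new Text();

      public void map(Object key, Text value, Context context)
          throws IOException, InterruptedException {
        StringTokenizer itr = new StringTokenizer(value.toString());
        while (itr.hasMoreTokens()) {
          word.set(itr.nextToken());
          context.write(word, one);
        }
      }
    }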

src/mapred/org/apache/hadoop/mapreduce/Reducer.java  +6 -6

@@ -84,7 +84,7 @@ import org.apache.hadoop.mapred.RawKeyValueIterator;
  *   
  *   <p>In this phase the 
  *   {@link #reduce(Object, Iterable, Context)}
- *   method is called for each <code>&lt;key, (collection of values)></code> in
+ *   method is called for each <code>&lt;key, (collection of values)&gt;</code> in
  *   the sorted inputs.</p>
  *   <p>The output of the reduce task is typically written to a 
  *   {@link RecordWriter} via 
@@ -96,18 +96,18 @@ import org.apache.hadoop.mapred.RawKeyValueIterator;
  * 
  * <p>Example:</p>
  * <p><blockquote><pre>
- * public class IntSumReducer<Key> extends Reducer<Key,IntWritable,
- *                                                 Key,IntWritable> {
+ * public class IntSumReducer&lt;Key&gt; extends Reducer&lt;Key,IntWritable,
+ *                                                 Key,IntWritable&gt; {
  *   private IntWritable result = new IntWritable();
  * 
- *   public void reduce(Key key, Iterable<IntWritable> values, 
- *                      Context context) throws IOException {
+ *   public void reduce(Key key, Iterable&lt;IntWritable&gt; values,
+ *                      Context context) throws IOException, InterruptedException {
  *     int sum = 0;
  *     for (IntWritable val : values) {
  *       sum += val.get();
  *     }
  *     result.set(sum);
- *     context.collect(key, result);
+ *     context.write(key, result);
  *   }
  * }
  * </pre></blockquote></p>
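
Likewise, the corrected Reducer example as a standalone class (again, the
imports are added for illustration and are not part of the patch):

    import java.io.IOException;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.mapreduce.Reducer;

    // Sums the values seen for each key and emits (key, sum). As in the
    // Mapper example, reduce() declares InterruptedException and emits
    // output through context.write().
    public class IntSumReducer<Key> extends Reducer<Key, IntWritable,
                                                    Key, IntWritable> {

      private IntWritable result = new IntWritable();

      public void reduce(Key key, Iterable<IntWritable> values, Context context)
          throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable val : values) {
          sum += val.get();
        }
        result.set(sum);
        context.write(key, result);
      }
    }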