
MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support binary compatibility with hadoop-1 examples. Contributed by Zhijie Shen.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1480394 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 12 years ago
parent
commit
8888d3fc49

+ 3 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -221,6 +221,9 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-5036. Default shuffle handler port should not be 8080.
     (Sandy Ryza via tomwhite)
 
+    MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support
+    binary compatibility with hadoop-1 examples. (Zhijie Shen via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method

+ 30 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java

@@ -102,15 +102,17 @@ public class ValueAggregatorJob {
   
   /**
    * Create an Aggregate based map/reduce job.
-   * 
+   *
    * @param args the arguments used for job creation. Generic hadoop
    * arguments are accepted.
+   * @param caller the caller class.
   * @return a JobConf object ready for submission.
-   * 
+   *
   * @throws IOException
   * @see GenericOptionsParser
   */
-  public static JobConf createValueAggregatorJob(String args[])
+  @SuppressWarnings("rawtypes")
+  public static JobConf createValueAggregatorJob(String args[], Class<?> caller)
    throws IOException {
 
    Configuration conf = new Configuration();
@@ -159,7 +161,7 @@
    }
    String userJarFile = theJob.get("user.jar.file");
    if (userJarFile == null) {
-      theJob.setJarByClass(ValueAggregator.class);
+      theJob.setJarByClass(caller != null ? caller : ValueAggregatorJob.class);
    } else {
      theJob.setJar(userJarFile);
    }
@@ -183,6 +185,21 @@
    return theJob;
  }
 
+  /**
+   * Create an Aggregate based map/reduce job.
+   * 
+   * @param args the arguments used for job creation. Generic hadoop
+   * arguments are accepted.
+   * @return a JobConf object ready for submission.
+   * 
+   * @throws IOException
+   * @see GenericOptionsParser
+   */
+  public static JobConf createValueAggregatorJob(String args[])
+    throws IOException {
+    return createValueAggregatorJob(args, ValueAggregator.class);
+  }
+
  public static JobConf createValueAggregatorJob(String args[]
    , Class<? extends ValueAggregatorDescriptor>[] descriptors)
  throws IOException {
@@ -199,7 +216,15 @@ public class ValueAggregatorJob {
      job.set("aggregator.descriptor." + i, "UserDefined," + descriptors[i].getName());
    }    
  }
-  
+
+  public static JobConf createValueAggregatorJob(String args[],
+      Class<? extends ValueAggregatorDescriptor>[] descriptors,
+      Class<?> caller) throws IOException {
+    JobConf job = createValueAggregatorJob(args, caller);
+    setAggregatorDescriptors(job, descriptors);
+    return job;
+  }
+
  /**
   * create and run an Aggregate based map/reduce job.
   * 
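
For orientation, here is a minimal sketch of how a hadoop-1 style driver could use the overloads this patch adds. The driver class name and setup are illustrative assumptions, not part of the commit:

import java.io.IOException;

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

// Hypothetical hadoop-1 style driver. Passing it as the caller lets
// setJarByClass() locate the job jar from the user's own class rather
// than from the aggregate library, which is what this patch enables.
public class MyAggregateDriver {
  public static void main(String[] args) throws IOException {
    // New overload from this commit: the caller class is used for jar
    // resolution when no "user.jar.file" is set in the configuration.
    JobConf job =
        ValueAggregatorJob.createValueAggregatorJob(args, MyAggregateDriver.class);
    JobClient.runJob(job);
  }
}

The pre-existing single-argument createValueAggregatorJob(args) keeps its signature and now delegates to the new overload, so callers compiled against hadoop-1 continue to work unchanged.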