
HADOOP-2268. Fix org.apache.hadoop.mapred.jobcontrol classes to use the List/Map interfaces rather than concrete ArrayList/HashMap classes internally. Contributed by Adrian Woodhead.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@610541 13f79535-47bb-0310-9956-ffa450edef68
Arun Murthy 17 years ago
Parent
Commit
49750732a7
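
The commit message above describes switching internal declarations from concrete collection classes to the List/Map interfaces. As a rough, self-contained illustration of that "program to an interface" idiom (the class and field names below are made up for the example, not taken from the patch):

import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;

class InterfaceTypedFields {
  // Declare fields against the interface types ...
  private List<String> jobs = new ArrayList<String>();                  // ... but keep a concrete class on the right-hand side
  private Map<String, String> queue = new Hashtable<String, String>();  // Hashtable keeps its synchronized behaviour

  void add(String id, String name) {
    jobs.add(name);
    queue.put(id, name);
  }
}

Because the rest of the class (and its callers) only depend on List/Map, the backing implementation (ArrayList, Hashtable, etc.) can later be swapped without touching any other code.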

+ 4 - 0
CHANGES.txt

@@ -181,6 +181,10 @@ Trunk (unreleased changes)
     HADOOP-2547. Removes use of a 'magic number' in build.xml. 
     (Hrishikesh via nigel)
 
+    HADOOP-2268. Fix org.apache.hadoop.mapred.jobcontrol classes to use the
+    List/Map interfaces rather than concrete ArrayList/HashMap classes
+    internally. (Adrian Woodhead via acmurthy)
+
   OPTIMIZATIONS
 
     HADOOP-1898.  Release the lock protecting the last time of the last stack

+ 5 - 4
src/java/org/apache/hadoop/mapred/jobcontrol/Job.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.util.StringUtils;
 
 import java.util.ArrayList;
+import java.util.List;
 import java.io.IOException;
 
 /** This class encapsulates a MapReduce job and its dependency. It monitors 
@@ -58,7 +59,7 @@ public class Job {
   private String jobName;		// external name, assigned/used by client app
   private String message;		// some info for human consumption, 
   // e.g. the reason why the job failed
-  private ArrayList dependingJobs;	// the jobs the current job depends on
+  private ArrayList<Job> dependingJobs;	// the jobs the current job depends on
 	
   private JobClient jc = null;		// the map reduce job client
 	
@@ -67,7 +68,7 @@ public class Job {
    * @param jobConf a mapred job configuration representing a job to be executed.
    * @param dependingJobs an array of jobs the current job depends on
    */
-  public Job(JobConf jobConf, ArrayList dependingJobs) throws IOException {
+  public Job(JobConf jobConf, ArrayList<Job> dependingJobs) throws IOException {
     this.theJobConf = jobConf;
     this.dependingJobs = dependingJobs;
     this.state = Job.WAITING;
@@ -202,7 +203,7 @@ public class Job {
   /**
    * @return the depending jobs of this job
    */
-  public ArrayList getDependingJobs() {
+  public ArrayList<Job> getDependingJobs() {
     return this.dependingJobs;
   }
   
@@ -216,7 +217,7 @@ public class Job {
   public synchronized boolean addDependingJob(Job dependingJob) {
     if (this.state == Job.WAITING) { //only allowed to add jobs when waiting
       if (this.dependingJobs == null) {
-        this.dependingJobs = new ArrayList();
+        this.dependingJobs = new ArrayList<Job>();
       }
       return this.dependingJobs.add(dependingJob);
     } else {

+ 14 - 17
src/java/org/apache/hadoop/mapred/jobcontrol/JobControl.java

@@ -21,7 +21,7 @@ package org.apache.hadoop.mapred.jobcontrol;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Hashtable;
-import java.util.Iterator;
+import java.util.Map;
 
 /** This class encapsulates a set of MapReduce jobs and its dependency. It tracks 
  *  the states of the jobs by placing them into different tables according to their 
@@ -48,11 +48,11 @@ public class JobControl implements Runnable{
 	
   private int runnerState;			// the thread state
 	
-  private Hashtable<String, Job> waitingJobs;
-  private Hashtable<String, Job> readyJobs;
-  private Hashtable<String, Job> runningJobs;
-  private Hashtable<String, Job> successfulJobs;
-  private Hashtable<String, Job> failedJobs;
+  private Map<String, Job> waitingJobs;
+  private Map<String, Job> readyJobs;
+  private Map<String, Job> runningJobs;
+  private Map<String, Job> successfulJobs;
+  private Map<String, Job> failedJobs;
 	
   private long nextJobID;
   private String groupName;
@@ -70,17 +70,15 @@ public class JobControl implements Runnable{
     this.nextJobID = -1;
     this.groupName = groupName;
     this.runnerState = JobControl.READY;
-		
   }
 	
-  private static ArrayList<Job> toArrayList(Hashtable<String, Job> jobs) {
+  private static ArrayList<Job> toArrayList(Map<String, Job> jobs) {
     ArrayList<Job> retv = new ArrayList<Job>();
     synchronized (jobs) {
       for (Job job : jobs.values()) {
         retv.add(job);
       }
     }
-		
     return retv;
   }
 	
@@ -121,19 +119,19 @@ public class JobControl implements Runnable{
     return this.groupName + this.nextJobID;
   }
 	
-  private static void addToQueue(Job aJob, Hashtable<String, Job> queue) {
+  private static void addToQueue(Job aJob, Map<String, Job> queue) {
     synchronized(queue) {
       queue.put(aJob.getJobID(), aJob);
     }		
   }
 	
   private void addToQueue(Job aJob) {
-    Hashtable<String, Job> queue = getQueue(aJob.getState());
+    Map<String, Job> queue = getQueue(aJob.getState());
     addToQueue(aJob, queue);	
   }
 	
-  private Hashtable<String, Job> getQueue(int state) {
-    Hashtable<String, Job> retv = null;
+  private Map<String, Job> getQueue(int state) {
+    Map<String, Job> retv = null;
     if (state == Job.WAITING) {
       retv = this.waitingJobs;
     } else if (state == Job.READY) {
@@ -146,7 +144,6 @@ public class JobControl implements Runnable{
       retv = this.failedJobs;
     } 
     return retv;
-			
   }
 
   /**
@@ -207,7 +204,7 @@ public class JobControl implements Runnable{
 	
   synchronized private void checkRunningJobs() {
 		
-    Hashtable<String, Job> oldJobs = null;
+    Map<String, Job> oldJobs = null;
     oldJobs = this.runningJobs;
     this.runningJobs = new Hashtable<String, Job>();
 		
@@ -224,7 +221,7 @@ public class JobControl implements Runnable{
   }
 	
   synchronized private void checkWaitingJobs() {
-    Hashtable<String, Job> oldJobs = null;
+    Map<String, Job> oldJobs = null;
     oldJobs = this.waitingJobs;
     this.waitingJobs = new Hashtable<String, Job>();
 		
@@ -241,7 +238,7 @@ public class JobControl implements Runnable{
   }
 	
   synchronized private void startReadyJobs() {
-    Hashtable<String, Job> oldJobs = null;
+    Map<String, Job> oldJobs = null;
     oldJobs = this.readyJobs;
     this.readyJobs = new Hashtable<String, Job>();
 		

+ 1 - 1
src/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java

@@ -82,7 +82,7 @@ public class ValueAggregatorJob {
     , Class<? extends ValueAggregatorDescriptor>[] descriptors) throws IOException {
     
     JobControl theControl = new JobControl("ValueAggregatorJobs");
-    ArrayList dependingJobs = new ArrayList();
+    ArrayList<Job> dependingJobs = new ArrayList<Job>();
     JobConf aJobConf = createValueAggregatorJob(args);
     if(descriptors != null)
       setAggregatorDescriptors(aJobConf, descriptors);