
MAPREDUCE-6253. Update use of Iterator to Iterable. Contributed by Ray Chiang.

Devaraj K 10 years ago
parent
commit
3a330aac7a
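
The commit applies one refactoring pattern throughout the files below: explicit Iterator loops, and loops over keySet() that call get() for every key, are replaced with an enhanced for loop over entrySet(), which delivers each key and value together without a second hash lookup. A minimal standalone sketch of the before/after shape (class and field names here are illustrative, not taken from the patch):

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    public class EntrySetExample {
      public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("maps", 4);
        counts.put("reduces", 2);

        // Before: iterate the keys, then look each value up again.
        Iterator<String> it = counts.keySet().iterator();
        while (it.hasNext()) {
          String key = it.next();
          Integer value = counts.get(key);   // extra lookup per key
          System.out.println(key + "=" + value);
        }

        // After: iterate the entries, key and value arrive together.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
          System.out.println(entry.getKey() + "=" + entry.getValue());
        }
      }
    }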

+ 2 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -278,6 +278,8 @@ Release 2.7.0 - UNRELEASED
 
     MAPREDUCE-6227. DFSIO for truncate. (shv via yliu)
 
+    MAPREDUCE-6253. Update use of Iterator to Iterable. (Ray Chiang via devaraj)
+
   OPTIMIZATIONS
 
     MAPREDUCE-6169. MergeQueue should release reference to the current item 

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java

@@ -374,11 +374,10 @@ public class JobHistoryEventHandler extends AbstractService
 
     // Process JobUnsuccessfulCompletionEvent for jobIds which still haven't
     // closed their event writers
-    Iterator<JobId> jobIt = fileMap.keySet().iterator();
     if(forceJobCompletion) {
-      while (jobIt.hasNext()) {
-        JobId toClose = jobIt.next();
-        MetaInfo mi = fileMap.get(toClose);
+      for (Map.Entry<JobId,MetaInfo> jobIt : fileMap.entrySet()) {
+        JobId toClose = jobIt.getKey();
+        MetaInfo mi = jobIt.getValue();
         if(mi != null && mi.isWriterActive()) {
           LOG.warn("Found jobId " + toClose
             + " to have not been closed. Will close");

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java

@@ -441,8 +441,8 @@ public class QueueManager {
   synchronized Map<String, JobQueueInfo> getJobQueueInfoMapping() {
     Map<String, JobQueueInfo> m = new HashMap<String, JobQueueInfo>();
 
-    for (String key : allQueues.keySet()) {
-      m.put(key, allQueues.get(key).getJobQueueInfo());
+    for (Map.Entry<String,Queue> entry : allQueues.entrySet()) {
+      m.put(entry.getKey(), entry.getValue().getJobQueueInfo());
     }
 
     return m;

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java

@@ -227,10 +227,10 @@ public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job
     completionEvents = new LinkedList<TaskAttemptCompletionEvent>();
     List<TaskAttempt> allTaskAttempts = new LinkedList<TaskAttempt>();
     int numMapAttempts = 0;
-    for (TaskId taskId : tasks.keySet()) {
-      Task task = tasks.get(taskId);
-      for (TaskAttemptId taskAttemptId : task.getAttempts().keySet()) {
-        TaskAttempt taskAttempt = task.getAttempts().get(taskAttemptId);
+    for (Map.Entry<TaskId,Task> taskEntry : tasks.entrySet()) {
+      Task task = taskEntry.getValue();
+      for (Map.Entry<TaskAttemptId,TaskAttempt> taskAttemptEntry : task.getAttempts().entrySet()) {
+        TaskAttempt taskAttempt = taskAttemptEntry.getValue();
         allTaskAttempts.add(taskAttempt);
         if (task.getType() == TaskType.MAP) {
           ++numMapAttempts;
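
In this last hunk the entry keys are never read, so iterating the value collections directly would express the same loops even more compactly; the patch keeps entrySet() for consistency with the other changes. A hedged sketch of the values() alternative (the types below are stand-ins, not the Hadoop classes):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ValuesExample {
      public static void main(String[] args) {
        Map<Integer, String> tasks = new LinkedHashMap<>();
        tasks.put(1, "map_task");
        tasks.put(2, "reduce_task");

        // When only the values are needed, values() keeps the loop header simple.
        for (String task : tasks.values()) {
          System.out.println(task);
        }
      }
    }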