
MAPREDUCE-3098. Fixed RM and MR AM to report YarnApplicationState and application's FinalStatus separately. Contributed by Hitesh Shah.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1177633 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli, 13 years ago
parent
commit
063e33a862
45 changed files with 860 additions and 576 deletions
  1. hadoop-mapreduce-project/CHANGES.txt (+3, -0)
  2. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java (+12, -11)
  3. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (+30, -30)
  4. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java (+4, -4)
  5. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java (+41, -40)
  6. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java (+28, -34)
  7. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java (+41, -41)
  8. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java (+24, -21)
  9. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java (+4, -2)
  10. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java (+0, -1)
  11. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java (+21, -21)
  12. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java (+17, -16)
  13. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java (+29, -15)
  14. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.java (+11, -13)
  15. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationMaster.java (+12, -12)
  16. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationReport.java (+38, -25)
  17. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/FinalApplicationStatus.java (+42, -0)
  18. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/YarnApplicationState.java (+13, -13)
  19. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationMasterPBImpl.java (+23, -26)
  20. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java (+64, -34)
  21. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/ProtoUtils.java (+28, -15)
  22. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto (+16, -9)
  23. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto (+3, -3)
  24. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java (+13, -10)
  25. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java (+3, -0)
  26. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java (+26, -20)
  27. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java (+7, -7)
  28. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java (+13, -12)
  29. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java (+46, -21)
  30. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java (+12, -10)
  31. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java (+30, -28)
  32. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/event/RMAppAttemptUnregistrationEvent.java (+7, -6)
  33. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java (+3, -2)
  34. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java (+2, -3)
  35. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockAM.java (+16, -16)
  36. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java (+12, -6)
  37. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMLaunchFailure.java (+5, -5)
  38. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMRestart.java (+12, -12)
  39. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestASMStateMachine.java (+7, -7)
  40. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestApplicationMasterExpiry.java (+12, -12)
  41. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestSchedulerNegotiator.java (+9, -9)
  42. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/MockRMApp.java (+3, -2)
  43. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java (+12, -1)
  44. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java (+105, -0)
  45. hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestQueueMetrics.java (+1, -1)

+ 3 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -324,6 +324,9 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-3001. Added task-specific counters to AppMaster and JobHistory
     web-UIs. (Robert Joseph Evans via vinodkv)
 
+    MAPREDUCE-3098. Fixed RM and MR AM to report YarnApplicationState and
+    application's FinalStatus separately. (Hitesh Shah via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-2026. Make JobTracker.getJobCounters() and

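The CHANGES.txt entry above captures the core of the patch: an application's lifecycle state (YarnApplicationState) and its outcome (FinalApplicationStatus) are now two separate fields on ApplicationReport instead of one overloaded state. The following is a minimal client-side sketch of reading both fields; the AppStatusPrinter class and describe method are illustrative names, not part of this patch, but the getters and enum values are the ones this change introduces.

import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;

public class AppStatusPrinter {
  // Summarizes an ApplicationReport using the two separate fields this patch introduces.
  public static String describe(ApplicationReport report) {
    // Lifecycle state tracked by the ResourceManager.
    YarnApplicationState state = report.getYarnApplicationState();
    // Outcome reported by the ApplicationMaster; UNDEFINED while the job has not
    // reached a terminal state (see the RMCommunicator change below).
    FinalApplicationStatus finalStatus = report.getFinalApplicationStatus();
    if (state == YarnApplicationState.FINISHED) {
      // A FINISHED application can still carry SUCCEEDED, FAILED or KILLED as its outcome.
      return "finished with final status " + finalStatus;
    }
    return "state " + state + ", final status so far " + finalStatus;
  }
}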
+ 12 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java

@@ -46,6 +46,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -75,7 +76,7 @@ public abstract class RMCommunicator extends AbstractService  {
 
   private final RecordFactory recordFactory =
       RecordFactoryProvider.getRecordFactory(null);
-  
+
   private final AppContext context;
   private Job job;
 
@@ -146,7 +147,7 @@ public abstract class RMCommunicator extends AbstractService  {
 
   protected void register() {
     //Register
-    String host = 
+    String host =
       clientService.getBindAddress().getAddress().getHostAddress();
     try {
       RegisterApplicationMasterRequest request =
@@ -155,7 +156,7 @@ public abstract class RMCommunicator extends AbstractService  {
       request.setHost(host);
       request.setRpcPort(clientService.getBindAddress().getPort());
       request.setTrackingUrl(host + ":" + clientService.getHttpPort());
-      RegisterApplicationMasterResponse response = 
+      RegisterApplicationMasterResponse response =
         scheduler.registerApplicationMaster(request);
       minContainerCapability = response.getMinimumResourceCapability();
       maxContainerCapability = response.getMaximumResourceCapability();
@@ -169,29 +170,29 @@ public abstract class RMCommunicator extends AbstractService  {
 
   protected void unregister() {
     try {
-      String finalState = "RUNNING";
+      FinalApplicationStatus finishState = FinalApplicationStatus.UNDEFINED;
       if (job.getState() == JobState.SUCCEEDED) {
-        finalState = "SUCCEEDED";
+        finishState = FinalApplicationStatus.SUCCEEDED;
       } else if (job.getState() == JobState.KILLED) {
-        finalState = "KILLED";
+        finishState = FinalApplicationStatus.KILLED;
       } else if (job.getState() == JobState.FAILED
           || job.getState() == JobState.ERROR) {
-        finalState = "FAILED";
+        finishState = FinalApplicationStatus.FAILED;
       }
       StringBuffer sb = new StringBuffer();
       for (String s : job.getDiagnostics()) {
         sb.append(s).append("\n");
       }
       LOG.info("Setting job diagnostics to " + sb.toString());
-      
-      String historyUrl = JobHistoryUtils.getHistoryUrl(getConfig(), 
+
+      String historyUrl = JobHistoryUtils.getHistoryUrl(getConfig(),
           context.getApplicationID());
       LOG.info("History url is " + historyUrl);
 
       FinishApplicationMasterRequest request =
           recordFactory.newRecordInstance(FinishApplicationMasterRequest.class);
       request.setAppAttemptId(this.applicationAttemptId);
-      request.setFinalState(finalState.toString());
+      request.setFinishApplicationStatus(finishState);
       request.setDiagnostics(sb.toString());
       request.setTrackingUrl(historyUrl);
       scheduler.finishApplicationMaster(request);
@@ -203,7 +204,7 @@ public abstract class RMCommunicator extends AbstractService  {
   protected Resource getMinContainerCapability() {
     return minContainerCapability;
   }
-  
+
   protected Resource getMaxContainerCapability() {
     return maxContainerCapability;
   }

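Condensed, the unregister() hunk above now maps the MapReduce job's terminal state to a FinalApplicationStatus and hands it to the RM via FinishApplicationMasterRequest.setFinishApplicationStatus(), instead of stuffing a state string into the request. A sketch of just that mapping follows; the FinalStatusMapper class and toFinalStatus method are illustrative names, not part of the patch.

import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;

final class FinalStatusMapper {
  // Mirrors the if/else chain in RMCommunicator.unregister() above.
  static FinalApplicationStatus toFinalStatus(JobState state) {
    switch (state) {
    case SUCCEEDED:
      return FinalApplicationStatus.SUCCEEDED;  // job completed normally
    case KILLED:
      return FinalApplicationStatus.KILLED;     // job was killed
    case FAILED:
    case ERROR:
      return FinalApplicationStatus.FAILED;     // job failed
    default:
      return FinalApplicationStatus.UNDEFINED;  // unregistering without a terminal job state
    }
  }
}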
+ 30 - 30
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java

@@ -45,7 +45,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.QueueState;
@@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 public class TypeConverter {
 
   private static RecordFactory recordFactory;
-  
+
   static {
     recordFactory = RecordFactoryProvider.getRecordFactory(null);
   }
@@ -75,7 +75,7 @@ public class TypeConverter {
   public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) {
     JobId jobId = recordFactory.newRecordInstance(JobId.class);
     jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid
-    
+
     ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
     appId.setId(id.getId());
     appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier()));
@@ -137,7 +137,7 @@ public class TypeConverter {
     }
     return TaskAttemptState.valueOf(state.toString());
   }
-  
+
   public static Phase toYarn(org.apache.hadoop.mapred.TaskStatus.Phase phase) {
     switch (phase) {
     case STARTING:
@@ -161,7 +161,7 @@ public class TypeConverter {
     TaskCompletionEvent[] oldEvents =
         new TaskCompletionEvent[newEvents.length];
     int i = 0;
-    for (TaskAttemptCompletionEvent newEvent 
+    for (TaskAttemptCompletionEvent newEvent
         : newEvents) {
       oldEvents[i++] = fromYarn(newEvent);
     }
@@ -215,19 +215,19 @@ public class TypeConverter {
     taskAttemptId.setId(id.getId());
     return taskAttemptId;
   }
-  
+
   public static org.apache.hadoop.mapreduce.Counters fromYarn(
       Counters yCntrs) {
     if (yCntrs == null) {
       return null;
     }
-    org.apache.hadoop.mapreduce.Counters counters = 
+    org.apache.hadoop.mapreduce.Counters counters =
       new org.apache.hadoop.mapreduce.Counters();
     for (CounterGroup yGrp : yCntrs.getAllCounterGroups().values()) {
       counters.addGroup(yGrp.getName(), yGrp.getDisplayName());
       for (Counter yCntr : yGrp.getAllCounters().values()) {
-        org.apache.hadoop.mapreduce.Counter c = 
-          counters.findCounter(yGrp.getName(), 
+        org.apache.hadoop.mapreduce.Counter c =
+          counters.findCounter(yGrp.getName(),
               yCntr.getName());
         c.setValue(yCntr.getValue());
       }
@@ -292,16 +292,16 @@ public class TypeConverter {
     jobStatus.setFailureInfo(jobreport.getDiagnostics());
     return jobStatus;
   }
-  
+
   public static org.apache.hadoop.mapreduce.QueueState fromYarn(
       QueueState state) {
-    org.apache.hadoop.mapreduce.QueueState qState = 
+    org.apache.hadoop.mapreduce.QueueState qState =
       org.apache.hadoop.mapreduce.QueueState.getState(
         state.toString().toLowerCase());
     return qState;
   }
 
-  
+
   public static int fromYarn(JobState state) {
     switch (state) {
     case NEW:
@@ -339,7 +339,7 @@ public class TypeConverter {
     }
     throw new YarnException("Unrecognized task state: " + state);
   }
-  
+
   public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.records.TaskReport report) {
     String[] diagnostics = null;
     if (report.getDiagnosticsList() != null) {
@@ -351,14 +351,14 @@ public class TypeConverter {
     } else {
       diagnostics = new String[0];
     }
-    
-    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()), 
+
+    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()),
         report.getProgress(), report.getTaskState().toString(),
       diagnostics, fromYarn(report.getTaskState()), report.getStartTime(), report.getFinishTime(),
       fromYarn(report.getCounters()));
-    List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts 
+    List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts
          = new ArrayList<org.apache.hadoop.mapreduce.TaskAttemptID>();
-    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id 
+    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id
         : report.getRunningAttemptsList()) {
       runningAtts.add(fromYarn(id));
     }
@@ -368,7 +368,7 @@ public class TypeConverter {
     }
     return rep;
   }
-  
+
   public static List<TaskReport> fromYarn(
       List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports) {
     List<TaskReport> reports = new ArrayList<TaskReport>();
@@ -377,14 +377,14 @@ public class TypeConverter {
     }
     return reports;
   }
-  
-  public static JobStatus.State fromYarn(ApplicationState state) {
+
+  public static JobStatus.State fromYarn(YarnApplicationState state) {
     switch (state) {
     case SUBMITTED:
       return State.PREP;
     case RUNNING:
       return State.RUNNING;
-    case SUCCEEDED:
+    case FINISHED:
       return State.SUCCEEDED;
     case FAILED:
       return State.FAILED;
@@ -396,7 +396,7 @@ public class TypeConverter {
 
   private static final String TT_NAME_PREFIX = "tracker_";
   public static TaskTrackerInfo fromYarn(NodeReport node) {
-    TaskTrackerInfo taskTracker = 
+    TaskTrackerInfo taskTracker =
       new TaskTrackerInfo(TT_NAME_PREFIX + node.getNodeId().toString());
     return taskTracker;
   }
@@ -417,7 +417,7 @@ public class TypeConverter {
       new JobStatus(
          TypeConverter.fromYarn(application.getApplicationId()),
          0.0f, 0.0f, 0.0f, 0.0f,
-          TypeConverter.fromYarn(application.getState()),
+          TypeConverter.fromYarn(application.getYarnApplicationState()),
          org.apache.hadoop.mapreduce.JobPriority.NORMAL,
          application.getUser(), application.getName(),
          application.getQueue(), jobFile, trackingUrl
@@ -433,7 +433,7 @@ public class TypeConverter {
     List<JobStatus> jobStatuses = new ArrayList<JobStatus>();
     for (ApplicationReport application : applications) {
       // each applicationReport has its own jobFile
-      org.apache.hadoop.mapreduce.JobID jobId = 
+      org.apache.hadoop.mapreduce.JobID jobId =
          TypeConverter.fromYarn(application.getApplicationId());
       jobStatuses.add(TypeConverter.fromYarn(application,
          MRApps.getJobFile(conf, application.getUser(), jobId)));
@@ -441,14 +441,14 @@ public class TypeConverter {
     return jobStatuses.toArray(new JobStatus[jobStatuses.size()]);
   }
 
-  
-  public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo 
+
+  public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo
      queueInfo, Configuration conf) {
     return new QueueInfo(queueInfo.getQueueName(),queueInfo.toString(),
        fromYarn(queueInfo.getQueueState()), TypeConverter.fromYarnApps(
        queueInfo.getApplications(), conf));
   }
-  
+
   public static QueueInfo[] fromYarnQueueInfo(
      List<org.apache.hadoop.yarn.api.records.QueueInfo> queues,
      Configuration conf) {
@@ -467,9 +467,9 @@ public class TypeConverter {
      for (QueueACL qAcl : aclInfo.getUserAcls()) {
        operations.add(qAcl.toString());
      }
-      
-      QueueAclsInfo acl = 
-        new QueueAclsInfo(aclInfo.getQueueName(), 
+
+      QueueAclsInfo acl =
+        new QueueAclsInfo(aclInfo.getQueueName(),
            operations.toArray(new String[operations.size()]));
      acls.add(acl);
    }

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java

@@ -21,7 +21,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl;
@@ -35,11 +35,11 @@ public class TestTypeConverter {
   @Test
   public void testFromYarn() throws Exception {
     int appStartTime = 612354;
-    ApplicationState state = ApplicationState.RUNNING;
+    YarnApplicationState state = YarnApplicationState.RUNNING;
     ApplicationId applicationId = new ApplicationIdPBImpl();
     ApplicationReportPBImpl applicationReport = new ApplicationReportPBImpl();
     applicationReport.setApplicationId(applicationId);
-    applicationReport.setState(state);
+    applicationReport.setYarnApplicationState(state);
     applicationReport.setStartTime(appStartTime);
     applicationReport.setUser("TestTypeConverter-user");
     JobStatus jobStatus = TypeConverter.fromYarn(applicationReport, "dummy-jobfile");
@@ -56,7 +56,7 @@ public class TestTypeConverter {
     ApplicationReport mockReport = mock(ApplicationReport.class);
     when(mockReport.getTrackingUrl()).thenReturn("dummy-tracking-url");
     when(mockReport.getApplicationId()).thenReturn(mockAppId);
-    when(mockReport.getState()).thenReturn(ApplicationState.KILLED);
+    when(mockReport.getYarnApplicationState()).thenReturn(YarnApplicationState.KILLED);
     when(mockReport.getUser()).thenReturn("dummy-user");
     when(mockReport.getQueue()).thenReturn("dummy-queue");
     String jobFile = "dummy-path/job.xml";

+ 41 - 40
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java

@@ -61,7 +61,7 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -89,7 +89,7 @@ public class ClientServiceDelegate {
   private static String UNKNOWN_USER = "Unknown User";
   private String trackingUrl;
 
-  public ClientServiceDelegate(Configuration conf, ResourceMgrDelegate rm, 
+  public ClientServiceDelegate(Configuration conf, ResourceMgrDelegate rm,
       JobID jobId, MRClientProtocol historyServerProxy) {
     this.conf = new Configuration(conf); // Cloning for modifying.
     // For faster redirects from AM to HS.
@@ -103,7 +103,7 @@ public class ClientServiceDelegate {
 
   // Get the instance of the NotRunningJob corresponding to the specified
   // user and state
-  private NotRunningJob getNotRunningJob(ApplicationReport applicationReport, 
+  private NotRunningJob getNotRunningJob(ApplicationReport applicationReport,
       JobState state) {
     synchronized (notRunningJobs) {
       HashMap<String, NotRunningJob> map = notRunningJobs.get(state);
@@ -111,8 +111,8 @@ public class ClientServiceDelegate {
         map = new HashMap<String, NotRunningJob>();
         notRunningJobs.put(state, map);
       }
-      String user = 
-          (applicationReport == null) ? 
+      String user =
+          (applicationReport == null) ?
              UNKNOWN_USER : applicationReport.getUser();
       NotRunningJob notRunningJob = map.get(user);
       if (notRunningJob == null) {
@@ -135,7 +135,7 @@ public class ClientServiceDelegate {
       trackingUrl = application.getTrackingUrl();
     }
     String serviceAddr = null;
-    while (application == null || ApplicationState.RUNNING.equals(application.getState())) {
+    while (application == null || YarnApplicationState.RUNNING.equals(application.getYarnApplicationState())) {
       if (application == null) {
         LOG.info("Could not get Job info from RM for job " + jobId
            + ". Redirecting to job history server.");
@@ -145,8 +145,8 @@ public class ClientServiceDelegate {
        if (application.getHost() == null || "".equals(application.getHost())) {
          LOG.debug("AM not assigned to Job. Waiting to get the AM ...");
          Thread.sleep(2000);
-   
-          LOG.debug("Application state is " + application.getState());
+
+          LOG.debug("Application state is " + application.getYarnApplicationState());
          application = rm.getApplicationReport(appId);
          continue;
        }
@@ -168,7 +168,7 @@ public class ClientServiceDelegate {
        //possibly the AM has crashed
        //there may be some time before AM is restarted
        //keep retrying by getting the address from RM
-        LOG.info("Could not connect to " + serviceAddr + 
+        LOG.info("Could not connect to " + serviceAddr +
        ". Waiting for getting the latest AM address...");
        try {
          Thread.sleep(2000);
@@ -189,35 +189,36 @@ public class ClientServiceDelegate {
     }
 
     /** we just want to return if its allocating, so that we don't
-     * block on it. This is to be able to return job status 
+     * block on it. This is to be able to return job status
      * on an allocating Application.
      */
-    
+
     String user = application.getUser();
     if (user == null) {
       throw RPCUtil.getRemoteException("User is not set in the application report");
     }
-    if (application.getState() == ApplicationState.NEW ||
-        application.getState() == ApplicationState.SUBMITTED) {
+    if (application.getYarnApplicationState() == YarnApplicationState.NEW ||
+        application.getYarnApplicationState() == YarnApplicationState.SUBMITTED) {
      realProxy = null;
      return getNotRunningJob(application, JobState.NEW);
    }
-    
-    if (application.getState() == ApplicationState.FAILED) {
+
+    if (application.getYarnApplicationState() == YarnApplicationState.FAILED) {
      realProxy = null;
      return getNotRunningJob(application, JobState.FAILED);
    }
-    
-    if (application.getState() == ApplicationState.KILLED) {
+
+    if (application.getYarnApplicationState() == YarnApplicationState.KILLED) {
      realProxy = null;
      return getNotRunningJob(application, JobState.KILLED);
    }
-    
-    //History server can serve a job only if application 
+
+    //History server can serve a job only if application
    //succeeded.
-    if (application.getState() == ApplicationState.SUCCEEDED) {
-      LOG.info("Application state is completed. " +
-          "Redirecting to job history server");
+    if (application.getYarnApplicationState() == YarnApplicationState.FINISHED) {
+      LOG.info("Application state is completed. FinalApplicationStatus="
+          + application.getFinalApplicationStatus().toString()
+          + ". Redirecting to job history server");
      realProxy = checkAndGetHSProxy(application, JobState.SUCCEEDED);
    }
    return realProxy;
@@ -241,7 +242,7 @@ public class ClientServiceDelegate {
        Configuration myConf = new Configuration(conf);
        myConf.setClass(
            YarnConfiguration.YARN_SECURITY_INFO,
-            SchedulerSecurityInfo.class, SecurityInfo.class); 
+            SchedulerSecurityInfo.class, SecurityInfo.class);
        YarnRPC rpc = YarnRPC.create(myConf);
        return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
            NetUtils.createSocketAddr(serviceAddr), myConf);
@@ -250,7 +251,7 @@ public class ClientServiceDelegate {
     LOG.trace("Connected to ApplicationMaster at: " + serviceAddr);
   }
 
-  private synchronized Object invoke(String method, Class argClass, 
+  private synchronized Object invoke(String method, Class argClass,
      Object args) throws YarnRemoteException {
     Method methodOb = null;
     try {
@@ -289,10 +290,10 @@ public class ClientServiceDelegate {
     org.apache.hadoop.mapreduce.v2.api.records.JobId jobID = TypeConverter.toYarn(arg0);
      GetCountersRequest request = recordFactory.newRecordInstance(GetCountersRequest.class);
      request.setJobId(jobID);
-      Counters cnt = ((GetCountersResponse) 
+      Counters cnt = ((GetCountersResponse)
          invoke("getCounters", GetCountersRequest.class, request)).getCounters();
      return TypeConverter.fromYarn(cnt);
-      
+
   }
 
   public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1, int arg2)
@@ -304,7 +305,7 @@ public class ClientServiceDelegate {
     request.setJobId(jobID);
     request.setFromEventId(arg1);
     request.setMaxEvents(arg2);
-    List<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent> list = 
+    List<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent> list =
      ((GetTaskAttemptCompletionEventsResponse) invoke(
        "getTaskAttemptCompletionEvents", GetTaskAttemptCompletionEventsRequest.class, request)).
        getCompletionEventList();
@@ -332,12 +333,12 @@ public class ClientServiceDelegate {
   }
  
   public JobStatus getJobStatus(JobID oldJobID) throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
      TypeConverter.toYarn(oldJobID);
-    GetJobReportRequest request = 
+    GetJobReportRequest request =
        recordFactory.newRecordInstance(GetJobReportRequest.class);
     request.setJobId(jobId);
-    JobReport report = ((GetJobReportResponse) invoke("getJobReport", 
+    JobReport report = ((GetJobReportResponse) invoke("getJobReport",
        GetJobReportRequest.class, request)).getJobReport();
     if (StringUtils.isEmpty(report.getJobFile())) {
       String jobFile = MRApps.getJobFile(conf, report.getUser(), oldJobID);
@@ -351,24 +352,24 @@ public class ClientServiceDelegate {
 
   public org.apache.hadoop.mapreduce.TaskReport[] getTaskReports(JobID oldJobID, TaskType taskType)
       throws YarnRemoteException, YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
      TypeConverter.toYarn(oldJobID);
-    GetTaskReportsRequest request = 
+    GetTaskReportsRequest request =
        recordFactory.newRecordInstance(GetTaskReportsRequest.class);
     request.setJobId(jobId);
     request.setTaskType(TypeConverter.toYarn(taskType));
-    
-    List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports = 
-      ((GetTaskReportsResponse) invoke("getTaskReports", GetTaskReportsRequest.class, 
+
+    List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports =
+      ((GetTaskReportsResponse) invoke("getTaskReports", GetTaskReportsRequest.class,
          request)).getTaskReportList();
-    
+
     return TypeConverter.fromYarn
     (taskReports).toArray(new org.apache.hadoop.mapreduce.TaskReport[0]);
   }
 
   public boolean killTask(TaskAttemptID taskAttemptID, boolean fail)
       throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID 
+    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID
      = TypeConverter.toYarn(taskAttemptID);
     if (fail) {
       FailTaskAttemptRequest failRequest = recordFactory.newRecordInstance(FailTaskAttemptRequest.class);
@@ -381,10 +382,10 @@ public class ClientServiceDelegate {
     }
     return true;
   }
-  
+
   public boolean killJob(JobID oldJobID)
       throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId
    = TypeConverter.toYarn(oldJobID);
     KillJobRequest killRequest = recordFactory.newRecordInstance(KillJobRequest.class);
     killRequest.setJobId(jobId);
@@ -392,5 +393,5 @@ public class ClientServiceDelegate {
     return true;
   }
 
-    
+
 }

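The ClientServiceDelegate changes above spell out the new client-side contract: while the report says RUNNING the client keeps talking to the AM, and once it says FINISHED the client consults FinalApplicationStatus and redirects to the job history server. Below is a small sketch of that terminal-state check, under the assumption that FINISHED, FAILED and KILLED are the terminal YarnApplicationState values; the AppCompletionCheck class name is illustrative, not part of the patch.

import java.util.EnumSet;

import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;

final class AppCompletionCheck {
  // "Is the application done?" is answered by YarnApplicationState alone.
  private static final EnumSet<YarnApplicationState> TERMINAL =
      EnumSet.of(YarnApplicationState.FINISHED,
          YarnApplicationState.FAILED,
          YarnApplicationState.KILLED);

  static boolean isTerminal(ApplicationReport report) {
    return TERMINAL.contains(report.getYarnApplicationState());
  }

  // "Did it succeed?" needs the separate FinalApplicationStatus field.
  static boolean succeeded(ApplicationReport report) {
    return report.getFinalApplicationStatus() == FinalApplicationStatus.SUCCEEDED;
  }
}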
+ 28 - 34
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java

@@ -22,8 +22,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
@@ -55,40 +53,36 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.BuilderUtils;
 
 public class NotRunningJob implements MRClientProtocol {
 
-  private static final Log LOG = LogFactory.getLog(NotRunningJob.class);
-  
-  private RecordFactory recordFactory = 
+  private RecordFactory recordFactory =
    RecordFactoryProvider.getRecordFactory(null);
-  
+
   private final JobState jobState;
   private final ApplicationReport applicationReport;
-  
-  
+
+
   private ApplicationReport getUnknownApplicationReport() {
-    ApplicationReport unknown = 
-        recordFactory.newRecordInstance(ApplicationReport.class);
-    unknown.setUser("N/A");
-    unknown.setHost("N/A");
-    unknown.setName("N/A");
-    unknown.setQueue("N/A");
-    unknown.setStartTime(0);
-    unknown.setFinishTime(0);
-    unknown.setTrackingUrl("N/A");
-    unknown.setDiagnostics("N/A");
-    LOG.info("getUnknownApplicationReport");
-    return unknown;
+    ApplicationId unknownAppId = recordFactory.newRecordInstance(ApplicationId.class);
+
+    // Setting AppState to NEW and finalStatus to UNDEFINED as they are never used 
+    // for a non running job
+    return BuilderUtils.newApplicationReport(unknownAppId, "N/A", "N/A", "N/A", "N/A", 0, "", 
+        YarnApplicationState.NEW, "N/A", "N/A", 0, 0, FinalApplicationStatus.UNDEFINED);    
   }
-  
+
   NotRunningJob(ApplicationReport applicationReport, JobState jobState) {
-    this.applicationReport = 
-        (applicationReport ==  null) ? 
+    this.applicationReport =
+        (applicationReport ==  null) ?
            getUnknownApplicationReport() : applicationReport;
     this.jobState = jobState;
   }
@@ -96,7 +90,7 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public FailTaskAttemptResponse failTaskAttempt(
      FailTaskAttemptRequest request) throws YarnRemoteException {
-    FailTaskAttemptResponse resp = 
+    FailTaskAttemptResponse resp =
      recordFactory.newRecordInstance(FailTaskAttemptResponse.class);
     return resp;
   }
@@ -104,7 +98,7 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public GetCountersResponse getCounters(GetCountersRequest request)
      throws YarnRemoteException {
-    GetCountersResponse resp = 
+    GetCountersResponse resp =
      recordFactory.newRecordInstance(GetCountersResponse.class);
     Counters counters = recordFactory.newRecordInstance(Counters.class);
     counters.addAllCounterGroups(new HashMap<String, CounterGroup>());
@@ -115,7 +109,7 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request)
      throws YarnRemoteException {
-    GetDiagnosticsResponse resp = 
+    GetDiagnosticsResponse resp =
      recordFactory.newRecordInstance(GetDiagnosticsResponse.class);
     resp.addDiagnostics("");
     return resp;
@@ -135,7 +129,7 @@ public class NotRunningJob implements MRClientProtocol {
     jobReport.setTrackingUrl(applicationReport.getTrackingUrl());
     jobReport.setFinishTime(applicationReport.getFinishTime());
 
-    GetJobReportResponse resp = 
+    GetJobReportResponse resp =
        recordFactory.newRecordInstance(GetJobReportResponse.class);
     resp.setJobReport(jobReport);
     return resp;
@@ -145,7 +139,7 @@ public class NotRunningJob implements MRClientProtocol {
   public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
      GetTaskAttemptCompletionEventsRequest request)
      throws YarnRemoteException {
-    GetTaskAttemptCompletionEventsResponse resp = 
+    GetTaskAttemptCompletionEventsResponse resp =
      recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsResponse.class);
     resp.addAllCompletionEvents(new ArrayList<TaskAttemptCompletionEvent>());
     return resp;
@@ -161,7 +155,7 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
      throws YarnRemoteException {
-    GetTaskReportResponse resp = 
+    GetTaskReportResponse resp =
      recordFactory.newRecordInstance(GetTaskReportResponse.class);
     TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
     report.setTaskId(request.getTaskId());
@@ -176,7 +170,7 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request)
      throws YarnRemoteException {
-    GetTaskReportsResponse resp = 
+    GetTaskReportsResponse resp =
      recordFactory.newRecordInstance(GetTaskReportsResponse.class);
     resp.addAllTaskReports(new ArrayList<TaskReport>());
     return resp;
@@ -185,7 +179,7 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public KillJobResponse killJob(KillJobRequest request)
      throws YarnRemoteException {
-    KillJobResponse resp = 
+    KillJobResponse resp =
      recordFactory.newRecordInstance(KillJobResponse.class);
     return resp;
   }
@@ -193,7 +187,7 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public KillTaskResponse killTask(KillTaskRequest request)
      throws YarnRemoteException {
-    KillTaskResponse resp = 
+    KillTaskResponse resp =
      recordFactory.newRecordInstance(KillTaskResponse.class);
     return resp;
   }
@@ -201,9 +195,9 @@ public class NotRunningJob implements MRClientProtocol {
   @Override
   public KillTaskAttemptResponse killTaskAttempt(
      KillTaskAttemptRequest request) throws YarnRemoteException {
-    KillTaskAttemptResponse resp = 
+    KillTaskAttemptResponse resp =
      recordFactory.newRecordInstance(KillTaskAttemptResponse.class);
     return resp;
   }
-  
+
 }

+ 41 - 41
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java

@@ -62,7 +62,6 @@ import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.LocalResource;
@@ -70,6 +69,7 @@ import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.URL;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -99,7 +99,7 @@ public class YARNRunner implements ClientProtocol {
   }

   /**
-   * Similar to {@link #YARNRunner(Configuration)} but allowing injecting 
+   * Similar to {@link #YARNRunner(Configuration)} but allowing injecting
    * {@link ResourceMgrDelegate}. Enables mocking and testing.
    * @param conf the configuration object for the client
    * @param resMgrDelegate the resourcemanager client handle.
@@ -107,12 +107,12 @@ public class YARNRunner implements ClientProtocol {
   public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate) {
    this(conf, resMgrDelegate, new ClientCache(conf, resMgrDelegate));
   }
-  
+
   /**
-   * Similar to {@link YARNRunner#YARNRunner(Configuration, ResourceMgrDelegate)} 
+   * Similar to {@link YARNRunner#YARNRunner(Configuration, ResourceMgrDelegate)}
    * but allowing injecting {@link ClientCache}. Enable mocking and testing.
    * @param conf the configuration object
-   * @param resMgrDelegate the resource manager delegate 
+   * @param resMgrDelegate the resource manager delegate
    * @param clientCache the client cache object.
    */
   public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate,
@@ -126,7 +126,7 @@ public class YARNRunner implements ClientProtocol {
       throw new RuntimeException("Error in instantiating YarnClient", ufe);
     }
   }
-  
+
   @Override
   public void cancelDelegationToken(Token<DelegationTokenIdentifier> arg0)
       throws IOException, InterruptedException {
@@ -152,7 +152,7 @@ public class YARNRunner implements ClientProtocol {

   @Override
   public ClusterMetrics getClusterMetrics() throws IOException,
-      InterruptedException {  
+      InterruptedException {
     return resMgrDelegate.getClusterMetrics();
   }

@@ -209,13 +209,13 @@ public class YARNRunner implements ClientProtocol {
   public String getSystemDir() throws IOException, InterruptedException {
     return resMgrDelegate.getSystemDir();
   }
-  
+
   @Override
   public long getTaskTrackerExpiryInterval() throws IOException,
       InterruptedException {
     return resMgrDelegate.getTaskTrackerExpiryInterval();
   }
-  
+
   @Override
   public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts)
   throws IOException, InterruptedException {
@@ -230,20 +230,20 @@ public class YARNRunner implements ClientProtocol {
     }

     // Construct necessary information to start the MR AM
-    ApplicationSubmissionContext appContext = 
+    ApplicationSubmissionContext appContext =
       createApplicationSubmissionContext(conf, jobSubmitDir, ts);
-    
+
     // Submit to ResourceManager
     ApplicationId applicationId = resMgrDelegate.submitApplication(appContext);
-    
+
     ApplicationReport appMaster = resMgrDelegate
         .getApplicationReport(applicationId);
-    String diagnostics = 
-        (appMaster == null ? 
+    String diagnostics =
+        (appMaster == null ?
             "application report is null" : appMaster.getDiagnostics());
-    if (appMaster == null || appMaster.getState() == ApplicationState.FAILED 
-        || appMaster.getState() == ApplicationState.KILLED) {
-      throw new IOException("Failed to run job : " + 
+    if (appMaster == null || appMaster.getYarnApplicationState() == YarnApplicationState.FAILED
+        || appMaster.getYarnApplicationState() == YarnApplicationState.KILLED) {
+      throw new IOException("Failed to run job : " +
         diagnostics);
     }
     return clientCache.getClient(jobId).getJobStatus(jobId);
@@ -266,7 +266,7 @@ public class YARNRunner implements ClientProtocol {
       Configuration jobConf,
       String jobSubmitDir, Credentials ts) throws IOException {
     ApplicationId applicationId = resMgrDelegate.getApplicationId();
-    
+
     // Setup resource requirements
     Resource capability = recordFactory.newRecordInstance(Resource.class);
     capability.setMemory(conf.getInt(MRJobConfig.MR_AM_VMEM_MB,
@@ -276,9 +276,9 @@ public class YARNRunner implements ClientProtocol {
     // Setup LocalResources
     Map<String, LocalResource> localResources =
         new HashMap<String, LocalResource>();
-    
+
     Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE);
-    
+
     URL yarnUrlForJobSubmitDir = ConverterUtils
         .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem()
             .resolvePath(
@@ -299,18 +299,18 @@ public class YARNRunner implements ClientProtocol {
       LOG.info("Job jar is not present. "
           + "Not adding any jar to the list of resources.");
     }
-    
+
     // TODO gross hack
-    for (String s : new String[] { 
-        MRJobConfig.JOB_SPLIT, 
+    for (String s : new String[] {
+        MRJobConfig.JOB_SPLIT,
         MRJobConfig.JOB_SPLIT_METAINFO,
         MRJobConfig.APPLICATION_TOKENS_FILE }) {
       localResources.put(
           MRJobConfig.JOB_SUBMIT_DIR + "/" + s,
-          createApplicationResource(defaultFileContext, 
+          createApplicationResource(defaultFileContext,
               new Path(jobSubmitDir, s)));
     }
-    
+
     // Setup security tokens
     ByteBuffer securityTokens = null;
     if (UserGroupInformation.isSecurityEnabled()) {
@@ -322,20 +322,20 @@ public class YARNRunner implements ClientProtocol {
     // Setup the command to run the AM
     Vector<CharSequence> vargs = new Vector<CharSequence>(8);
     vargs.add(Environment.JAVA_HOME.$() + "/bin/java");
-    
+
     long logSize = TaskLog.getTaskLogLength(new JobConf(conf));
     vargs.add("-Dlog4j.configuration=container-log4j.properties");
     vargs.add("-D" + MRJobConfig.TASK_LOG_DIR + "="
         + ApplicationConstants.LOG_DIR_EXPANSION_VAR);
     vargs.add("-D" + MRJobConfig.TASK_LOG_SIZE + "=" + logSize);
-    
+
     vargs.add(conf.get(MRJobConfig.MR_AM_COMMAND_OPTS,
         MRJobConfig.DEFAULT_MR_AM_COMMAND_OPTS));

     vargs.add(MRJobConfig.APPLICATION_MASTER_CLASS);
-    vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + 
+    vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
         Path.SEPARATOR + ApplicationConstants.STDOUT);
-    vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + 
+    vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
         Path.SEPARATOR + ApplicationConstants.STDERR);


@@ -349,12 +349,12 @@ public class YARNRunner implements ClientProtocol {

     LOG.info("Command to launch container for ApplicationMaster is : "
         + mergedCommand);
-    
-    // Setup the CLASSPATH in environment 
+
+    // Setup the CLASSPATH in environment
     // i.e. add { job jar, CWD, Hadoop jars} to classpath.
     Map<String, String> environment = new HashMap<String, String>();
     MRApps.setClasspath(environment);
-    
+
     // Parse distributed cache
     MRApps.setupDistributedCache(jobConf, localResources);

@@ -374,12 +374,12 @@ public class YARNRunner implements ClientProtocol {
     appContext.setUser(                                        // User name
         UserGroupInformation.getCurrentUser().getShortUserName());
     appContext.setQueue(                                       // Queue name
-        jobConf.get(JobContext.QUEUE_NAME,     
+        jobConf.get(JobContext.QUEUE_NAME,
         YarnConfiguration.DEFAULT_QUEUE_NAME));
     appContext.setApplicationName(                             // Job name
-        jobConf.get(JobContext.JOB_NAME, 
-        YarnConfiguration.DEFAULT_APPLICATION_NAME));              
-    appContext.setAMContainerSpec(amContainer);         // AM Container 
+        jobConf.get(JobContext.JOB_NAME,
+        YarnConfiguration.DEFAULT_APPLICATION_NAME));
+    appContext.setAMContainerSpec(amContainer);         // AM Container

     return appContext;
   }
@@ -394,14 +394,14 @@ public class YARNRunner implements ClientProtocol {
   public long getProtocolVersion(String arg0, long arg1) throws IOException {
     return resMgrDelegate.getProtocolVersion(arg0, arg1);
   }
-  
+
   @Override
   public long renewDelegationToken(Token<DelegationTokenIdentifier> arg0)
       throws IOException, InterruptedException {
     return resMgrDelegate.renewDelegationToken(arg0);
   }

-  
+
   @Override
   public Counters getJobCounters(JobID arg0) throws IOException,
       InterruptedException {
@@ -419,7 +419,7 @@ public class YARNRunner implements ClientProtocol {
     JobStatus status = clientCache.getClient(jobID).getJobStatus(jobID);
     return status;
   }
-  
+
   @Override
   public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
       int arg2) throws IOException, InterruptedException {
@@ -446,8 +446,8 @@ public class YARNRunner implements ClientProtocol {
     if (status.getState() != JobStatus.State.RUNNING) {
       resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId());
       return;
-    } 
-    
+    }
+
     try {
       /* send a kill to the AM */
       clientCache.getClient(arg0).killJob(arg0);

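The submitJob() hunk above is the client-facing half of this change: YARNRunner now inspects YarnApplicationState instead of the removed ApplicationState. A minimal sketch of how a caller can read the two fields that now travel separately on ApplicationReport; the helper class and method names here are illustrative only, not part of this patch:

```java
import java.io.IOException;

import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;

/** Illustrative helper (not part of this patch) showing the two new fields. */
public class AppOutcome {

  /** Returns true only when the app has finished and the AM reported success. */
  public static boolean succeeded(ApplicationReport report) throws IOException {
    // Lifecycle state: where the application sits in the RM state machine.
    YarnApplicationState state = report.getYarnApplicationState();
    // Outcome: what the AM reported on finish; UNDEFINED while still running.
    FinalApplicationStatus status = report.getFinalApplicationStatus();

    if (state == YarnApplicationState.FAILED
        || state == YarnApplicationState.KILLED) {
      throw new IOException("Failed to run job : " + report.getDiagnostics());
    }
    return state == YarnApplicationState.FINISHED
        && status == FinalApplicationStatus.SUCCEEDED;
  }
}
```

YarnApplicationState answers where the application sits in the ResourceManager lifecycle, while FinalApplicationStatus records how the ApplicationMaster said it ended, so a report can legitimately be RUNNING with an UNDEFINED final status.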
+ 24 - 21
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java

@@ -88,7 +88,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -107,17 +108,17 @@ public class TestClientRedirect {
   private static final Log LOG = LogFactory.getLog(TestClientRedirect.class);
   private static final String RMADDRESS = "0.0.0.0:8054";
   private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
-  
+
   private static final String AMHOSTADDRESS = "0.0.0.0:10020";
   private static final String HSHOSTADDRESS = "0.0.0.0:10021";
-  private volatile boolean amContact = false; 
+  private volatile boolean amContact = false;
   private volatile boolean hsContact = false;
   private volatile boolean amRunning = false;
   private volatile boolean amRestarting = false;

   @Test
   public void testRedirect() throws Exception {
-    
+
     Configuration conf = new YarnConfiguration();
     conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
     conf.set(YarnConfiguration.RM_ADDRESS, RMADDRESS);
@@ -125,7 +126,7 @@ public class TestClientRedirect {
     RMService rmService = new RMService("test");
     rmService.init(conf);
     rmService.start();
-  
+
     AMService amService = new AMService();
     amService.init(conf);
     amService.start(conf);
@@ -134,16 +135,16 @@ public class TestClientRedirect {
     HistoryService historyService = new HistoryService();
     historyService.init(conf);
     historyService.start(conf);
-  
+
     LOG.info("services started");
     Cluster cluster = new Cluster(conf);
     org.apache.hadoop.mapreduce.JobID jobID =
       new org.apache.hadoop.mapred.JobID("201103121733", 1);
-    org.apache.hadoop.mapreduce.Counters counters = 
+    org.apache.hadoop.mapreduce.Counters counters =
         cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(amContact);
-   
+
     LOG.info("Sleeping for 5 seconds before stop for" +
     " the client socket to not get EOF immediately..");
     Thread.sleep(5000);
@@ -155,17 +156,17 @@ public class TestClientRedirect {
     LOG.info("Sleeping for 5 seconds after stop for" +
     		" the server to exit cleanly..");
     Thread.sleep(5000);
-    
+
     amRestarting = true;
     // Same client
     //results are returned from fake (not started job)
     counters = cluster.getJob(jobID).getCounters();
     Assert.assertEquals(0, counters.countCounters());
     Job job = cluster.getJob(jobID);
-    org.apache.hadoop.mapreduce.TaskID taskId = 
+    org.apache.hadoop.mapreduce.TaskID taskId =
       new org.apache.hadoop.mapreduce.TaskID(jobID, TaskType.MAP, 0);
     TaskAttemptID tId = new TaskAttemptID(taskId, 0);
-    
+
     //invoke all methods to check that no exception is thrown
     job.killJob();
     job.killTask(tId);
@@ -175,25 +176,25 @@ public class TestClientRedirect {
     job.getTaskDiagnostics(tId);
     job.getTaskReports(TaskType.MAP);
     job.getTrackingURL();
-    
+
     amRestarting = false;
     amService = new AMService();
     amService.init(conf);
     amService.start(conf);
     amRunning = true;
     amContact = false; //reset
-    
+
     counters = cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(amContact);
-    
+
     amRunning = false;

     // Same client
     counters = cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(hsContact);
-    
+
     rmService.stop();
     historyService.stop();
   }
@@ -248,7 +249,7 @@ public class TestClientRedirect {
     public GetNewApplicationResponse getNewApplication(GetNewApplicationRequest request) throws YarnRemoteException {
       return null;
     }
-    
+
     @Override
     public GetApplicationReportResponse getApplicationReport(
         GetApplicationReportRequest request) throws YarnRemoteException {
@@ -256,12 +257,14 @@ public class TestClientRedirect {
       ApplicationReport application = recordFactory
           .newRecordInstance(ApplicationReport.class);
       application.setApplicationId(applicationId);
+      application.setFinalApplicationStatus(FinalApplicationStatus.UNDEFINED);
       if (amRunning) {
-        application.setState(ApplicationState.RUNNING);
+        application.setYarnApplicationState(YarnApplicationState.RUNNING);
       } else if (amRestarting) {
-        application.setState(ApplicationState.SUBMITTED);
+        application.setYarnApplicationState(YarnApplicationState.SUBMITTED);
       } else {
-        application.setState(ApplicationState.SUCCEEDED);
+        application.setYarnApplicationState(YarnApplicationState.FINISHED);
+        application.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
       }
       String[] split = AMHOSTADDRESS.split(":");
       application.setHost(split[0]);
@@ -339,7 +342,7 @@ public class TestClientRedirect {
    }
   }

-  class AMService extends AbstractService 
+  class AMService extends AbstractService
       implements MRClientProtocol {
     private InetSocketAddress bindAddress;
     private Server server;
@@ -347,7 +350,7 @@ public class TestClientRedirect {
     public AMService() {
       this(AMHOSTADDRESS);
     }
-    
+
     public AMService(String hostAddress) {
       super("AMService");
       this.hostAddress = hostAddress;

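The stub ResourceManager in TestClientRedirect now fills in both fields, which illustrates the contract this patch establishes: every report carries a lifecycle state, and the final status stays UNDEFINED until the application has actually finished. A small sketch of the producer side, using the same Records factory the other tests in this patch rely on; the class and method names below are made up for illustration:

```java
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.util.Records;

/** Sketch of the producer side of the new ApplicationReport contract. */
public class ReportFactory {

  /** A report for an application that is still running: no final status yet. */
  public static ApplicationReport running(ApplicationId appId) {
    ApplicationReport report = Records.newRecord(ApplicationReport.class);
    report.setApplicationId(appId);
    report.setYarnApplicationState(YarnApplicationState.RUNNING);
    report.setFinalApplicationStatus(FinalApplicationStatus.UNDEFINED);
    return report;
  }

  /** A report for an application that finished and whose AM reported success. */
  public static ApplicationReport finishedOk(ApplicationId appId) {
    ApplicationReport report = Records.newRecord(ApplicationReport.class);
    report.setApplicationId(appId);
    report.setYarnApplicationState(YarnApplicationState.FINISHED);
    report.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
    return report;
  }
}
```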
+ 4 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java

@@ -32,8 +32,9 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
@@ -163,7 +164,7 @@ public class TestClientServiceDelegate {
   private ApplicationReport getApplicationReport() {
     ApplicationReport applicationReport = Records
         .newRecord(ApplicationReport.class);
-    applicationReport.setState(ApplicationState.SUCCEEDED);
+    applicationReport.setYarnApplicationState(YarnApplicationState.FINISHED);
     applicationReport.setUser("root");
     applicationReport.setHost("N/A");
     applicationReport.setName("N/A");
@@ -172,6 +173,7 @@ public class TestClientServiceDelegate {
     applicationReport.setFinishTime(0);
     applicationReport.setTrackingUrl("N/A");
     applicationReport.setDiagnostics("N/A");
+    applicationReport.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
     return applicationReport;
   }


+ 0 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java

@@ -36,7 +36,6 @@ import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
 import org.junit.Before;
 import org.junit.After;

+ 21 - 21
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java

@@ -64,10 +64,10 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.QueueInfo;
 import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -77,25 +77,25 @@ import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;

 /**
- * Test YarnRunner and make sure the client side plugin works 
+ * Test YarnRunner and make sure the client side plugin works
  * fine
  */
 public class TestYARNRunner extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
   private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
- 
+
   private YARNRunner yarnRunner;
   private ResourceMgrDelegate resourceMgrDelegate;
   private YarnConfiguration conf;
   private ClientCache clientCache;
   private ApplicationId appId;
   private JobID jobId;
-  private File testWorkDir = 
+  private File testWorkDir =
       new File("target", TestYARNRunner.class.getName());
   private ApplicationSubmissionContext submissionContext;
   private  ClientServiceDelegate clientDelegate;
   private static final String failString = "Rejected job";
- 
+
   @Before
   public void setUp() throws Exception {
     resourceMgrDelegate = mock(ResourceMgrDelegate.class);
@@ -115,7 +115,7 @@ public class TestYARNRunner extends TestCase {
         }
         ).when(yarnRunner).createApplicationSubmissionContext(any(Configuration.class),
             any(String.class), any(Credentials.class));
-    
+
     appId = recordFactory.newRecordInstance(ApplicationId.class);
     appId.setClusterTimestamp(System.currentTimeMillis());
     appId.setId(1);
@@ -125,13 +125,13 @@ public class TestYARNRunner extends TestCase {
     }
     testWorkDir.mkdirs();
    }
-  
-  
+
+
   @Test
   public void testJobKill() throws Exception {
     clientDelegate = mock(ClientServiceDelegate.class);
-    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new 
-        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f, 
+    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
+        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
             State.PREP, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
     when(clientDelegate.killJob(any(JobID.class))).thenReturn(true);
     doAnswer(
@@ -145,13 +145,13 @@ public class TestYARNRunner extends TestCase {
         ).when(clientCache).getClient(any(JobID.class));
     yarnRunner.killJob(jobId);
     verify(resourceMgrDelegate).killApplication(appId);
-    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new 
-        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f, 
+    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
+        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
             State.RUNNING, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
     yarnRunner.killJob(jobId);
     verify(clientDelegate).killJob(jobId);
   }
-  
+
   @Test
   public void testJobSubmissionFailure() throws Exception {
     when(resourceMgrDelegate.submitApplication(any(ApplicationSubmissionContext.class))).
@@ -159,7 +159,7 @@ public class TestYARNRunner extends TestCase {
     ApplicationReport report = mock(ApplicationReport.class);
     when(report.getApplicationId()).thenReturn(appId);
     when(report.getDiagnostics()).thenReturn(failString);
-    when(report.getState()).thenReturn(ApplicationState.FAILED);
+    when(report.getYarnApplicationState()).thenReturn(YarnApplicationState.FAILED);
     when(resourceMgrDelegate.getApplicationReport(appId)).thenReturn(report);
     Credentials credentials = new Credentials();
     File jobxml = new File(testWorkDir, "job.xml");
@@ -167,13 +167,13 @@ public class TestYARNRunner extends TestCase {
     conf.writeXml(out);
     out.close();
     try {
-      yarnRunner.submitJob(jobId, testWorkDir.getAbsolutePath().toString(), credentials); 
+      yarnRunner.submitJob(jobId, testWorkDir.getAbsolutePath().toString(), credentials);
     } catch(IOException io) {
       LOG.info("Logging exception:", io);
       assertTrue(io.getLocalizedMessage().contains(failString));
     }
   }
-  
+
   @Test
   public void testResourceMgrDelegate() throws Exception {
     /* we not want a mock of resourcemgr deleagte */
@@ -184,19 +184,19 @@ public class TestYARNRunner extends TestCase {
     .thenReturn(null);
     delegate.killApplication(appId);
     verify(clientRMProtocol).forceKillApplication(any(KillApplicationRequest.class));
-    
+
     /* make sure getalljobs calls get all applications */
     when(clientRMProtocol.getAllApplications(any(GetAllApplicationsRequest.class))).
     thenReturn(recordFactory.newRecordInstance(GetAllApplicationsResponse.class));
     delegate.getAllJobs();
     verify(clientRMProtocol).getAllApplications(any(GetAllApplicationsRequest.class));
-    
+
     /* make sure getapplication report is called */
     when(clientRMProtocol.getApplicationReport(any(GetApplicationReportRequest.class)))
     .thenReturn(recordFactory.newRecordInstance(GetApplicationReportResponse.class));
     delegate.getApplicationReport(appId);
     verify(clientRMProtocol).getApplicationReport(any(GetApplicationReportRequest.class));
-    
+
     /* make sure metrics is called */
     GetClusterMetricsResponse clusterMetricsResponse = recordFactory.newRecordInstance
         (GetClusterMetricsResponse.class);
@@ -206,7 +206,7 @@ public class TestYARNRunner extends TestCase {
     .thenReturn(clusterMetricsResponse);
     delegate.getClusterMetrics();
     verify(clientRMProtocol).getClusterMetrics(any(GetClusterMetricsRequest.class));
-    
+
     when(clientRMProtocol.getClusterNodes(any(GetClusterNodesRequest.class))).
     thenReturn(recordFactory.newRecordInstance(GetClusterNodesResponse.class));
     delegate.getActiveTrackers();
@@ -227,7 +227,7 @@ public class TestYARNRunner extends TestCase {
     thenReturn(queueInfoResponse);
     delegate.getQueues();
     verify(clientRMProtocol).getQueueInfo(any(GetQueueInfoRequest.class));
-    
+
     GetQueueUserAclsInfoResponse aclResponse = recordFactory.newRecordInstance(
         GetQueueUserAclsInfoResponse.class);
     when(clientRMProtocol.getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class)))

+ 17 - 16
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java

@@ -22,15 +22,16 @@ import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
 import org.apache.hadoop.yarn.api.AMRMProtocol;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;

 /**
- * <p>The finalization request sent by the <code>ApplicationMaster</code> to 
+ * <p>The finalization request sent by the <code>ApplicationMaster</code> to
  * inform the <code>ResourceManager</code> about its completion.</p>
- * 
+ *
  * <p>The final request includes details such:
  *   <ul>
  *     <li>
- *         {@link ApplicationAttemptId} being managed by the 
+ *         {@link ApplicationAttemptId} being managed by the
  *         <code>ApplicationMaster</code>
  *     </li>
  *     <li>Final state of the <code>ApplicationMaster</code></li>
@@ -47,19 +48,19 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 public interface FinishApplicationMasterRequest {

   /**
-   * Get the <code>ApplicationAttemptId</code> being managed by the 
+   * Get the <code>ApplicationAttemptId</code> being managed by the
    * <code>ApplicationMaster</code>.
-   * @return <code>ApplicationAttemptId</code> being managed by the 
+   * @return <code>ApplicationAttemptId</code> being managed by the
    *         <code>ApplicationMaster</code>
    */
   @Public
   @Stable
   ApplicationAttemptId getApplicationAttemptId();
-  
+
   /**
-   * Set the <code>ApplicationAttemptId</code> being managed by the 
+   * Set the <code>ApplicationAttemptId</code> being managed by the
    * <code>ApplicationMaster</code>.
-   * @param applicationAttemptId <code>ApplicationAttemptId</code> being managed 
+   * @param applicationAttemptId <code>ApplicationAttemptId</code> being managed
    *                             by the <code>ApplicationMaster</code>
    */
   @Public
@@ -72,15 +73,15 @@ public interface FinishApplicationMasterRequest {
    */
   @Public
   @Stable
-  String getFinalState();
-  
+  FinalApplicationStatus getFinalApplicationStatus();
+
   /**
-   * Set <em>final state</em> of the <code>ApplicationMaster</code>
-   * @param finalState <em>final state</em> of the <code>ApplicationMaster</code>
+   * Set the <em>finish state</em> of the <code>ApplicationMaster</code>
+   * @param finishState <em>finish state</em> of the <code>ApplicationMaster</code>
    */
   @Public
   @Stable
-  void setFinalState(String finalState);
+  void setFinishApplicationStatus(FinalApplicationStatus finishState);

   /**
    * Get <em>diagnostic information</em> on application failure.
@@ -89,7 +90,7 @@ public interface FinishApplicationMasterRequest {
   @Public
   @Stable
   String getDiagnostics();
-  
+
   /**
    * Set <em>diagnostic information</em> on application failure.
    * @param diagnostics <em>diagnostic information</em> on application failure
@@ -105,10 +106,10 @@ public interface FinishApplicationMasterRequest {
   @Public
   @Stable
   String getTrackingUrl();
-  
+
   /**
    * Set the <em>tracking URL</em>for the <code>ApplicationMaster</code>
-   * @param url <em>tracking URL</em>for the 
+   * @param url <em>tracking URL</em>for the
    *                   <code>ApplicationMaster</code>
    */
   @Public

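With the interface change above, an ApplicationMaster reports its outcome as a typed FinalApplicationStatus instead of a free-form string. A rough sketch of the unregistration call, assuming an AMRMProtocol handle and the accessor names shown in this patch; the surrounding class is hypothetical:

```java
import org.apache.hadoop.yarn.api.AMRMProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.util.Records;

/** Sketch of an AM unregistering with the new typed final status. */
public class AMUnregister {

  public static void unregister(AMRMProtocol scheduler,
      ApplicationAttemptId attemptId, boolean jobSucceeded, String url)
      throws YarnRemoteException {
    FinishApplicationMasterRequest request =
        Records.newRecord(FinishApplicationMasterRequest.class);
    request.setAppAttemptId(attemptId);
    // The typed status replaces the old setFinalState(String).
    request.setFinishApplicationStatus(jobSucceeded
        ? FinalApplicationStatus.SUCCEEDED : FinalApplicationStatus.FAILED);
    request.setTrackingUrl(url);
    request.setDiagnostics("");
    scheduler.finishApplicationMaster(request);
  }
}
```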
+ 29 - 15
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java

@@ -21,23 +21,24 @@ package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

 import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ProtoBase;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProtoOrBuilder;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProtoOrBuilder;
+import org.apache.hadoop.yarn.util.ProtoUtils;


-    
 public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplicationMasterRequestProto> implements FinishApplicationMasterRequest {
   FinishApplicationMasterRequestProto proto = FinishApplicationMasterRequestProto.getDefaultInstance();
   FinishApplicationMasterRequestProto.Builder builder = null;
   boolean viaProto = false;
-  
+
   private ApplicationAttemptId appAttemptId = null;
-  
-  
+
+
   public FinishApplicationMasterRequestPBImpl() {
     builder = FinishApplicationMasterRequestProto.newBuilder();
   }
@@ -46,7 +47,7 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
     this.proto = proto;
     viaProto = true;
   }
-  
+
   public FinishApplicationMasterRequestProto getProto() {
       mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
@@ -61,7 +62,7 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
   }

   private void mergeLocalToProto() {
-    if (viaProto) 
+    if (viaProto)
       maybeInitBuilder();
     mergeLocalToBuilder();
     proto = builder.build();
@@ -74,8 +75,7 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
     }
     viaProto = false;
   }
-    
-  
+
   @Override
   public ApplicationAttemptId getApplicationAttemptId() {
     FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
@@ -92,7 +92,7 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
   @Override
   public void setAppAttemptId(ApplicationAttemptId applicationAttemptId) {
     maybeInitBuilder();
-    if (applicationAttemptId == null) 
+    if (applicationAttemptId == null)
       builder.clearApplicationAttemptId();
     this.appAttemptId = applicationAttemptId;
   }
@@ -122,15 +122,22 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
   }

   @Override
-  public String getFinalState() {
+  public FinalApplicationStatus getFinalApplicationStatus() {
     FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
-    return p.getFinalState();
+    if (!p.hasFinalApplicationStatus()) {
+      return null;
+    }	
+    return convertFromProtoFormat(p.getFinalApplicationStatus());
   }

   @Override
-  public void setFinalState(String state) {
+  public void setFinishApplicationStatus(FinalApplicationStatus finishState) {
     maybeInitBuilder();
-    builder.setFinalState(state);
+    if (finishState == null) {
+      builder.clearFinalApplicationStatus();
+      return;
+    }
+    builder.setFinalApplicationStatus(convertToProtoFormat(finishState));
   }

   private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) {
@@ -141,6 +148,13 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
     return ((ApplicationAttemptIdPBImpl)t).getProto();
   }

+  private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) {
+    return ProtoUtils.convertFromProtoFormat(s);
+  }
+
+  private FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus s) {
+    return ProtoUtils.convertToProtoFormat(s);
+  }


-}  
+}

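The PB implementation above only converts the enum when the underlying proto field is actually present, so an unset status surfaces as null rather than a default value. A short sketch of that round trip, under the same Records factory assumption as the earlier examples:

```java
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.util.Records;

/** Sketch of the optional-field behaviour of the PB-backed record. */
public class FinalStatusRoundTrip {

  public static void main(String[] args) {
    FinishApplicationMasterRequest request =
        Records.newRecord(FinishApplicationMasterRequest.class);

    // Nothing set yet: the proto has no finalApplicationStatus, so the getter returns null.
    assert request.getFinalApplicationStatus() == null;

    // Setting the field stores the proto enum; the getter converts it back.
    request.setFinishApplicationStatus(FinalApplicationStatus.KILLED);
    assert request.getFinalApplicationStatus() == FinalApplicationStatus.KILLED;
  }
}
```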
+ 11 - 13
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.java

@@ -23,24 +23,22 @@ import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRespo
 import org.apache.hadoop.yarn.api.records.ProtoBase;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
-import org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder;


-    
-public class RegisterApplicationMasterResponsePBImpl 
-extends ProtoBase<RegisterApplicationMasterResponseProto> 
+public class RegisterApplicationMasterResponsePBImpl
+extends ProtoBase<RegisterApplicationMasterResponseProto>
 implements RegisterApplicationMasterResponse {
-  RegisterApplicationMasterResponseProto proto = 
+  RegisterApplicationMasterResponseProto proto =
     RegisterApplicationMasterResponseProto.getDefaultInstance();
   RegisterApplicationMasterResponseProto.Builder builder = null;
   boolean viaProto = false;
-  
+
   private Resource minimumResourceCapability;
   private Resource maximumResourceCapability;
-  
+
   public RegisterApplicationMasterResponsePBImpl() {
     builder = RegisterApplicationMasterResponseProto.newBuilder();
   }
@@ -49,16 +47,16 @@ implements RegisterApplicationMasterResponse {
     this.proto = proto;
     viaProto = true;
   }
-  
+
   public RegisterApplicationMasterResponseProto getProto() {
     mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
     viaProto = true;
     return proto;
   }
-  
+
   private void mergeLocalToProto() {
-    if (viaProto) 
+    if (viaProto)
       maybeInitBuilder();
     mergeLocalToBuilder();
     proto = builder.build();
@@ -94,7 +92,7 @@ implements RegisterApplicationMasterResponse {
     if (!p.hasMaximumCapability()) {
       return null;
     }
-    
+
     this.maximumResourceCapability = convertFromProtoFormat(p.getMaximumCapability());
     return this.maximumResourceCapability;
   }
@@ -109,7 +107,7 @@ implements RegisterApplicationMasterResponse {
     if (!p.hasMinimumCapability()) {
       return null;
     }
-    
+
     this.minimumResourceCapability = convertFromProtoFormat(p.getMinimumCapability());
     return this.minimumResourceCapability;
   }
@@ -140,4 +138,4 @@ implements RegisterApplicationMasterResponse {
     return ((ResourcePBImpl)resource).getProto();
   }

-}  
+}

+ 12 - 12
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationMaster.java

@@ -22,38 +22,38 @@ import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 
 /**
- * <em>For internal use only...</em> 
+ * <em>For internal use only...</em>
  */
 @Private
 @Unstable
 public interface ApplicationMaster {
   ApplicationId getApplicationId();
   void setApplicationId(ApplicationId appId);
-  
+
   String getHost();
   void setHost(String host);
-  
+
   int getRpcPort();
   void setRpcPort(int rpcPort);
-  
+
   String getTrackingUrl();
   void setTrackingUrl(String url);
-  
+
   ApplicationStatus getStatus();
   void setStatus(ApplicationStatus status);
-  
-  ApplicationState getState();
-  void setState(ApplicationState state);
-  
+
+  YarnApplicationState getState();
+  void setState(YarnApplicationState state);
+
   String getClientToken();
   void setClientToken(String clientToken);
-  
+
   int getAMFailCount();
   void setAMFailCount(int amFailCount);
-  
+
   int getContainerCount();
   void setContainerCount(int containerCount);
-  
+
   String getDiagnostics();
   void setDiagnostics(String diagnostics);
 }

+ 38 - 25
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationReport.java

@@ -36,13 +36,13 @@ import org.apache.hadoop.yarn.api.ClientRMProtocol;
  *     <li>Host on which the <code>ApplicationMaster</code>is running.</li>
  *     <li>RPC port of the <code>ApplicationMaster</code>.</li>
  *     <li>Tracking URL.</li>
- *     <li>{@link ApplicationState} of the application.</li>
+ *     <li>{@link YarnApplicationState} of the application.</li>
  *     <li>Diagnostic information in case of errors.</li>
  *     <li>Start time of the application.</li>
  *     <li>Client token of the application (if security is enabled).</li>
  *   </ul>
  * </p>
- * 
+ *
  * @see ClientRMProtocol#getApplicationReport(org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest)
  */
 @Public
@@ -56,7 +56,7 @@ public interface ApplicationReport {
   @Public
   @Stable
   ApplicationId getApplicationId();
-  
+
   @Private
   @Unstable
   void setApplicationId(ApplicationId applicationId);
@@ -68,7 +68,7 @@ public interface ApplicationReport {
   @Public
   @Stable
   String getUser();
-  
+
   @Private
   @Unstable
   void setUser(String user);
@@ -80,7 +80,7 @@ public interface ApplicationReport {
   @Public
   @Stable
   String getQueue();
-  
+
   @Private
   @Unstable
   void setQueue(String queue);
@@ -92,21 +92,21 @@ public interface ApplicationReport {
   @Public
   @Stable
   String getName();
-  
+
   @Private
   @Unstable
   void setName(String name);
 
   /**
-   * Get the <em>host</em> on which the <code>ApplicationMaster</code> 
+   * Get the <em>host</em> on which the <code>ApplicationMaster</code>
    * is running.
-   * @return <em>host</em> on which the <code>ApplicationMaster</code> 
+   * @return <em>host</em> on which the <code>ApplicationMaster</code>
    *         is running
    */
   @Public
   @Stable
   String getHost();
-  
+
   @Private
   @Unstable
   void setHost(String host);
@@ -118,47 +118,47 @@ public interface ApplicationReport {
   @Public
   @Stable
   int getRpcPort();
-  
+
   @Private
   @Unstable
   void setRpcPort(int rpcPort);
 
   /**
-   * Get the <em>client token</em> for communicating with the 
+   * Get the <em>client token</em> for communicating with the
    * <code>ApplicationMaster</code>.
-   * @return <em>client token</em> for communicating with the 
+   * @return <em>client token</em> for communicating with the
    * <code>ApplicationMaster</code>
    */
   @Public
   @Stable
   String getClientToken();
-  
+
   @Private
   @Unstable
   void setClientToken(String clientToken);
 
   /**
-   * Get the <code>ApplicationState</code> of the application.
-   * @return <code>ApplicationState</code> of the application
+   * Get the <code>YarnApplicationState</code> of the application.
+   * @return <code>YarnApplicationState</code> of the application
    */
   @Public
   @Stable
-  ApplicationState getState();
-  
+  YarnApplicationState getYarnApplicationState();
+
   @Private
   @Unstable
-  void setState(ApplicationState state);
+  void setYarnApplicationState(YarnApplicationState state);
 
   /**
-   * Get  the <em>diagnositic information</em> of the application in case of 
+   * Get  the <em>diagnositic information</em> of the application in case of
    * errors.
-   * @return <em>diagnositic information</em> of the application in case 
+   * @return <em>diagnositic information</em> of the application in case
    *         of errors
    */
   @Public
   @Stable
   String getDiagnostics();
-  
+
   @Private
   @Unstable
   void setDiagnostics(String diagnostics);
@@ -170,11 +170,11 @@ public interface ApplicationReport {
   @Public
   @Stable
   String getTrackingUrl();
-  
+
   @Private
   @Unstable
   void setTrackingUrl(String url);
-  
+
   /**
    * Get the <em>start time</em> of the application.
    * @return <em>start time</em> of the application
@@ -182,7 +182,7 @@ public interface ApplicationReport {
   @Public
   @Stable
   long getStartTime();
-  
+
   @Private
   @Unstable
   void setStartTime(long startTime);
@@ -194,8 +194,21 @@ public interface ApplicationReport {
   @Public
   @Stable
   long getFinishTime();
-  
+
   @Private
   @Unstable
   void setFinishTime(long finishTime);
+
+
+  /**
+   * Get the <em>final finish status</em> of the application.
+   */
+  @Public
+  @Stable
+  FinalApplicationStatus getFinalApplicationStatus();
+
+  @Private
+  @Unstable
+  void setFinalApplicationStatus(FinalApplicationStatus finishState);
+
 }
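
With the state split introduced by this change, a client that wants to know whether a job actually succeeded has to look at both fields of the report. A minimal sketch, assuming an ApplicationReport already fetched via ClientRMProtocol#getApplicationReport (the helper method name is illustrative, not part of the patch):

  static boolean finishedSuccessfully(ApplicationReport report) {
    // FINISHED only says the application completed; the outcome comes from FinalApplicationStatus.
    return report.getYarnApplicationState() == YarnApplicationState.FINISHED
        && report.getFinalApplicationStatus() == FinalApplicationStatus.SUCCEEDED;
  }
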

+ 42 - 0
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/FinalApplicationStatus.java

@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.api.records;
+
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Stable;
+
+/**
+ * Enumeration of various final states of an <code>Application</code>.
+ */
+@Public
+@Stable
+public enum FinalApplicationStatus {
+
+  /** Undefined state when either the application has not yet finished */
+  UNDEFINED,
+
+  /** Application which finished successfully. */
+  SUCCEEDED,
+
+  /** Application which failed. */
+  FAILED,
+
+  /** Application which was terminated by a user or admin. */
+  KILLED
+}
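
UNDEFINED is the value reports are expected to carry while the application has not yet finished; SUCCEEDED/FAILED/KILLED only become meaningful once the application reaches a terminal YarnApplicationState. A hedged sketch of how a caller might render the enum (method and labels are illustrative only):

  static String describe(FinalApplicationStatus status) {
    switch (status) {
      case SUCCEEDED: return "finished successfully";
      case FAILED:    return "failed";
      case KILLED:    return "killed by a user or admin";
      case UNDEFINED:
      default:        return "not finished yet";
    }
  }
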

+ 13 - 13
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationState.java → hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/YarnApplicationState.java

@@ -22,26 +22,26 @@ import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
 
 /**
- * Ennumeration of various states of an <code>Application</code>.
+ * Ennumeration of various states of an <code>ApplicationMaster</code>.
  */
 @Public
 @Stable
-public enum ApplicationState {
+public enum YarnApplicationState {
   /** Application which was just created. */
-  NEW, 
-  
+  NEW,
+
   /** Application which has been submitted. */
-  SUBMITTED, 
-  
+  SUBMITTED,
+
   /** Application which is currently running. */
-  RUNNING, 
-  
-  /** Application which completed successfully. */
-  SUCCEEDED, 
-  
+  RUNNING,
+
+  /** Application which finished successfully. */
+  FINISHED,
+
   /** Application which failed. */
-  FAILED, 
-  
+  FAILED,
+
   /** Application which was terminated by a user or admin. */
   KILLED
 }

+ 23 - 26
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationMasterPBImpl.java

@@ -19,31 +19,28 @@
 package org.apache.hadoop.yarn.api.records.impl.pb;
 
 
-import java.util.List;
-
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationMaster;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ProtoBase;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationMasterProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationMasterProtoOrBuilder;
-import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStateProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStatusProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
 import org.apache.hadoop.yarn.util.ProtoUtils;
 
 
-    
 public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> implements ApplicationMaster {
   ApplicationMasterProto proto = ApplicationMasterProto.getDefaultInstance();
   ApplicationMasterProto.Builder builder = null;
   boolean viaProto = false;
-  
+
   private ApplicationId applicationId = null;
   private ApplicationStatus applicationStatus = null;
-  
-  
+
+
   public ApplicationMasterPBImpl() {
     builder = ApplicationMasterProto.newBuilder();
   }
@@ -52,9 +49,9 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
     this.proto = proto;
     viaProto = true;
   }
-  
+
   public ApplicationMasterProto getProto() {
-  
+
       mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
     viaProto = true;
@@ -72,24 +69,24 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
   }
 
   private void mergeLocalToProto() {
-    if (viaProto) 
+    if (viaProto)
       maybeInitBuilder();
     mergeLocalToBuilder();
     proto = builder.build();
-    
+
     viaProto = true;
   }
-  
+
   private void maybeInitBuilder() {
     if (viaProto || builder == null) {
       builder = ApplicationMasterProto.newBuilder(proto);
     }
     viaProto = false;
   }
-    
-  
+
+
   @Override
-  public ApplicationState getState() {
+  public YarnApplicationState getState() {
     ApplicationMasterProtoOrBuilder p = viaProto ? proto : builder;
     if (!p.hasState()) {
       return null;
@@ -98,7 +95,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
   }
 
   @Override
-  public void setState(ApplicationState state) {
+  public void setState(YarnApplicationState state) {
     maybeInitBuilder();
     if (state == null) {
       builder.clearState();
@@ -124,7 +121,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
     }
     builder.setHost((host));
   }
-  
+
   @Override
   public ApplicationId getApplicationId() {
     ApplicationMasterProtoOrBuilder p = viaProto ? proto : builder;
@@ -135,7 +132,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
       return null;
     }
     applicationId = convertFromProtoFormat(p.getApplicationId());
-    
+
     return applicationId;
   }
 
@@ -145,7 +142,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
     if (applicationId == null)
       builder.clearApplicationId();
     this.applicationId = applicationId;
-    
+
   }
   @Override
   public int getRpcPort() {
@@ -179,7 +176,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
       return null;
     }
     this.applicationStatus = convertFromProtoFormat(p.getStatus());
-    
+
     return this.applicationStatus;
   }
 
@@ -189,7 +186,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
     if (status == null)
       builder.clearStatus();
     this.applicationStatus = status;
-    
+
   }
   @Override
   public String getClientToken() {
@@ -209,7 +206,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
     }
     builder.setClientToken((clientToken));
   }
-  
+
   @Override
   public int getAMFailCount() {
     ApplicationMasterProtoOrBuilder p = viaProto ? proto : builder;
@@ -250,11 +247,11 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
     builder.setDiagnostics(diagnostics);
   }
 
-  private ApplicationStateProto convertToProtoFormat(ApplicationState e) {
+  private YarnApplicationStateProto convertToProtoFormat(YarnApplicationState e) {
     return ProtoUtils.convertToProtoFormat(e);
   }
 
-  private ApplicationState convertFromProtoFormat(ApplicationStateProto e) {
+  private YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto e) {
     return ProtoUtils.convertFromProtoFormat(e);
   }
 
@@ -274,4 +271,4 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
     return ((ApplicationStatusPBImpl)t).getProto();
   }
 
-}  
+}

+ 64 - 34
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java

@@ -18,17 +18,19 @@
 
 package org.apache.hadoop.yarn.api.records.impl.pb;
 
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ProtoBase;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder;
-import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStateProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
 import org.apache.hadoop.yarn.util.ProtoUtils;
 
-public class ApplicationReportPBImpl extends ProtoBase<ApplicationReportProto> 
+public class ApplicationReportPBImpl extends ProtoBase<ApplicationReportProto>
 implements ApplicationReport {
   ApplicationReportProto proto = ApplicationReportProto.getDefaultInstance();
   ApplicationReportProto.Builder builder = null;
@@ -39,7 +41,7 @@ implements ApplicationReport {
   public ApplicationReportPBImpl() {
     builder = ApplicationReportProto.newBuilder();
   }
-  
+
   public ApplicationReportPBImpl(ApplicationReportProto proto) {
     this.proto = proto;
     viaProto = true;
@@ -87,12 +89,12 @@ implements ApplicationReport {
   }
 
   @Override
-  public ApplicationState getState() {
+  public YarnApplicationState getYarnApplicationState() {
     ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
-    if (!p.hasState()) {
+    if (!p.hasYarnApplicationState()) {
       return null;
     }
-    return convertFromProtoFormat(p.getState());
+    return convertFromProtoFormat(p.getYarnApplicationState());
   }
 
   @Override
@@ -138,6 +140,27 @@ implements ApplicationReport {
     return p.getDiagnostics();
   }
 
+  @Override
+  public long getStartTime() {
+    ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
+    return p.getStartTime();
+  }
+
+  @Override
+  public long getFinishTime() {
+    ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
+    return p.getFinishTime();
+  }
+
+  @Override
+  public FinalApplicationStatus getFinalApplicationStatus() {
+    ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
+    if (!p.hasFinalApplicationStatus()) {
+      return null;
+    }
+    return convertFromProtoFormat(p.getFinalApplicationStatus());
+  }
+
   @Override
   public void setApplicationId(ApplicationId applicationId) {
     maybeInitBuilder();
@@ -177,13 +200,13 @@ implements ApplicationReport {
   }
 
   @Override
-  public void setState(ApplicationState state) {
+  public void setYarnApplicationState(YarnApplicationState state) {
     maybeInitBuilder();
     if (state == null) {
-      builder.clearState();
+      builder.clearYarnApplicationState();
       return;
     }
-    builder.setState(convertToProtoFormat(state));
+    builder.setYarnApplicationState(convertToProtoFormat(state));
   }
 
   @Override
@@ -232,20 +255,6 @@ implements ApplicationReport {
     builder.setDiagnostics(diagnostics);
   }
 
-  @Override
-  public ApplicationReportProto getProto() {
-    mergeLocalToProto();
-    proto = viaProto ? proto : builder.build();
-    viaProto = true;
-    return proto;
-  }
-
-  @Override
-  public long getStartTime() {
-    ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
-    return p.getStartTime();
-  }
-
   @Override
   public void setStartTime(long startTime) {
     maybeInitBuilder();
@@ -253,15 +262,27 @@ implements ApplicationReport {
   }
 
   @Override
-  public long getFinishTime() {
-    ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
-    return p.getFinishTime();
+  public void setFinishTime(long finishTime) {
+    maybeInitBuilder();
+    builder.setFinishTime(finishTime);
   }
 
   @Override
-  public void setFinishTime(long finishTime) {
+  public void setFinalApplicationStatus(FinalApplicationStatus finishState) {
     maybeInitBuilder();
-    builder.setFinishTime(finishTime);
+    if (finishState == null) {
+      builder.clearFinalApplicationStatus();
+      return;
+    }
+    builder.setFinalApplicationStatus(convertToProtoFormat(finishState));
+  }
+
+  @Override
+  public ApplicationReportProto getProto() {
+    mergeLocalToProto();
+    proto = viaProto ? proto : builder.build();
+    viaProto = true;
+    return proto;
   }
 
   private void mergeLocalToBuilder() {
@@ -291,16 +312,25 @@ implements ApplicationReport {
     return ((ApplicationIdPBImpl) t).getProto();
   }
 
-  private ApplicationState convertFromProtoFormat(ApplicationStateProto s) {
+  private ApplicationIdPBImpl convertFromProtoFormat(
+      ApplicationIdProto applicationId) {
+    return new ApplicationIdPBImpl(applicationId);
+  }
+
+  private YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto s) {
     return ProtoUtils.convertFromProtoFormat(s);
   }
 
-  private ApplicationStateProto convertToProtoFormat(ApplicationState s) {
+  private YarnApplicationStateProto convertToProtoFormat(YarnApplicationState s) {
     return ProtoUtils.convertToProtoFormat(s);
   }
 
-  private ApplicationIdPBImpl convertFromProtoFormat(
-      ApplicationIdProto applicationId) {
-    return new ApplicationIdPBImpl(applicationId);
+  private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) {
+    return ProtoUtils.convertFromProtoFormat(s);
   }
+
+  private FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus s) {
+    return ProtoUtils.convertToProtoFormat(s);
+  }
+
 }
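
The PBImpl classes above follow the lazy proto/builder pattern: setters mutate a builder, and getProto() merges and builds the protobuf message on demand. A minimal round-trip sketch using the accessors added by this patch (the helper method and the byte[] step are illustrative, not part of the patch):

  static ApplicationReport roundTrip() throws com.google.protobuf.InvalidProtocolBufferException {
    ApplicationReportPBImpl report = new ApplicationReportPBImpl();
    report.setYarnApplicationState(YarnApplicationState.RUNNING);       // new accessor from this patch
    report.setFinalApplicationStatus(FinalApplicationStatus.UNDEFINED); // new accessor from this patch
    byte[] wire = report.getProto().toByteArray();   // getProto() builds the proto lazily
    return new ApplicationReportPBImpl(ApplicationReportProto.parseFrom(wire));
  }
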

+ 28 - 15
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/ProtoUtils.java

@@ -20,24 +20,26 @@ package org.apache.hadoop.yarn.util;
 
 import java.nio.ByteBuffer;
 
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ContainerState;
 import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.QueueState;
-import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStateProto;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
 
 import com.google.protobuf.ByteString;
 
 public class ProtoUtils {
-  
-  
+
+
   /*
    * ContainerState
    */
@@ -48,18 +50,29 @@ public class ProtoUtils {
   public static ContainerState convertFromProtoFormat(ContainerStateProto e) {
     return ContainerState.valueOf(e.name().replace(CONTAINER_STATE_PREFIX, ""));
   }
-  
+
 
   /*
-   * ApplicationState
+   * YarnApplicationState
    */
-  public static ApplicationStateProto convertToProtoFormat(ApplicationState e) {
-    return ApplicationStateProto.valueOf(e.name());
+  public static YarnApplicationStateProto convertToProtoFormat(YarnApplicationState e) {
+    return YarnApplicationStateProto.valueOf(e.name());
   }
-  public static ApplicationState convertFromProtoFormat(ApplicationStateProto e) {
-    return ApplicationState.valueOf(e.name());
+  public static YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto e) {
+    return YarnApplicationState.valueOf(e.name());
   }
-  
+
+  /*
+   * FinalApplicationStatus
+   */
+  private static String FINAL_APPLICATION_STATUS_PREFIX = "APP_";
+  public static FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus e) {
+    return FinalApplicationStatusProto.valueOf(FINAL_APPLICATION_STATUS_PREFIX + e.name());
+  }
+  public static FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto e) {
+    return FinalApplicationStatus.valueOf(e.name().replace(FINAL_APPLICATION_STATUS_PREFIX, ""));
+  }
+
   /*
    * LocalResourceType
    */
@@ -69,7 +82,7 @@ public class ProtoUtils {
   public static LocalResourceType convertFromProtoFormat(LocalResourceTypeProto e) {
     return LocalResourceType.valueOf(e.name());
   }
-  
+
   /*
    * LocalResourceVisibility
    */
@@ -79,7 +92,7 @@ public class ProtoUtils {
   public static LocalResourceVisibility convertFromProtoFormat(LocalResourceVisibilityProto e) {
     return LocalResourceVisibility.valueOf(e.name());
   }
-  
+
   /*
    * ByteBuffer
    */
@@ -98,7 +111,7 @@ public class ProtoUtils {
     byteBuffer.position(oldPos);
     return bs;
   }
-  
+
   /*
    * QueueState
    */
@@ -109,7 +122,7 @@ public class ProtoUtils {
   public static QueueState convertFromProtoFormat(QueueStateProto e) {
     return QueueState.valueOf(e.name().replace(QUEUE_STATE_PREFIX, ""));
   }
-  
+
   /*
    * QueueACL
    */
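
Because the proto enum constants carry an APP_ prefix (see the proto change below), the new converters add and strip that prefix when mapping between the Java and protobuf enums. A hedged illustration of the round trip, using only the methods added above:

  FinalApplicationStatusProto p = ProtoUtils.convertToProtoFormat(FinalApplicationStatus.KILLED);
  // p is FinalApplicationStatusProto.APP_KILLED (prefix added)
  FinalApplicationStatus s = ProtoUtils.convertFromProtoFormat(p);
  // s is FinalApplicationStatus.KILLED (prefix stripped)
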

+ 16 - 9
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto

@@ -76,14 +76,20 @@ message ContainerProto {
   optional ContainerStatusProto container_status = 8;
 }
 
-enum ApplicationStateProto {
+enum YarnApplicationStateProto {
   NEW = 1;
   SUBMITTED = 2;
   RUNNING = 3;
-  RESTARTING = 4;
-  SUCCEEDED = 5;
-  FAILED = 6;
-  KILLED = 7; 
+  FINISHED = 4;
+  FAILED = 5;
+  KILLED = 6;
+}
+
+enum FinalApplicationStatusProto {
+  APP_UNDEFINED = 0;
+  APP_SUCCEEDED = 1;
+  APP_FAILED = 2;
+  APP_KILLED = 3;
 }
 
 message ApplicationStatusProto {
@@ -98,7 +104,7 @@ message ApplicationMasterProto {
   optional int32 rpc_port = 3;
   optional string trackingUrl = 4;
   optional ApplicationStatusProto status = 5;
-  optional ApplicationStateProto state = 6;
+  optional YarnApplicationStateProto state = 6;
   optional string client_token = 7;
   optional int32 containerCount = 8;
   optional int32 amFailCount = 9;
@@ -107,7 +113,7 @@ message ApplicationMasterProto {
 
 message URLProto {
   optional string scheme = 1;
-  optional string host = 2; 
+  optional string host = 2;
   optional int32 port = 3;
   optional string file = 4;
 }
@@ -140,12 +146,13 @@ message ApplicationReportProto {
   optional int32 rpc_port = 6;
   optional string client_token = 7;
   optional ApplicationStatusProto status = 8;
-  optional ApplicationStateProto state = 9;
+  optional YarnApplicationStateProto yarn_application_state = 9;
   optional ContainerProto masterContainer = 10;
   optional string trackingUrl = 11;
   optional string diagnostics = 12 [default = "N/A"];
   optional int64 startTime = 13;
   optional int64 finishTime = 14;
+  optional FinalApplicationStatusProto final_application_status = 15;
 }
 
 message NodeIdProto {
@@ -195,7 +202,7 @@ message AMResponseProto {
 message ApplicationSubmissionContextProto {
   optional ApplicationIdProto application_id = 1;
   optional string application_name = 2 [default = "N/A"];
-  optional string user = 3; 
+  optional string user = 3;
   optional string queue = 4 [default = "default"];
   optional PriorityProto priority = 5;
   optional ContainerLaunchContextProto am_container_spec = 6;

+ 3 - 3
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto

@@ -42,7 +42,7 @@ message FinishApplicationMasterRequestProto {
   optional ApplicationAttemptIdProto application_attempt_id = 1;
   optional string diagnostics = 2;
   optional string tracking_url = 3;
-  optional string final_state = 4;
+  optional FinalApplicationStatusProto final_application_status = 4;
 }
 
 message FinishApplicationMasterResponseProto {
@@ -115,7 +115,7 @@ message GetClusterNodesRequestProto {
 }
 
 message GetClusterNodesResponseProto {
-  repeated NodeReportProto nodeReports = 1; 
+  repeated NodeReportProto nodeReports = 1;
 }
 
 message GetQueueInfoRequestProto {
@@ -133,7 +133,7 @@ message GetQueueUserAclsInfoRequestProto {
 }
 
 message GetQueueUserAclsInfoResponseProto {
-  repeated QueueUserACLInfoProto queueUserAcls = 1; 
+  repeated QueueUserACLInfoProto queueUserAcls = 1;
 }
 
 //////////////////////////////////////////////////////
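
With the free-form final_state string replaced by a typed FinalApplicationStatusProto, an ApplicationMaster unregisters by reporting its outcome explicitly. A hedged AM-side sketch built on the record changed by this patch; the record-factory usage, the historyUrl variable, and the scheduler proxy are assumptions for illustration only:

  FinishApplicationMasterRequest request =
      Records.newRecord(FinishApplicationMasterRequest.class);
  request.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED); // field introduced by this patch
  request.setDiagnostics("");
  request.setTrackingUrl(historyUrl);          // historyUrl: assumed to be available in the AM
  scheduler.finishApplicationMaster(request);  // scheduler: an AMRMProtocol proxy, assumed
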

+ 13 - 10
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java

@@ -24,9 +24,10 @@ import java.util.List;
 
 import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerState;
@@ -69,12 +70,12 @@ public class BuilderUtils {
     }
   }
 
-  public static class ResourceRequestComparator 
+  public static class ResourceRequestComparator
   implements java.util.Comparator<org.apache.hadoop.yarn.api.records.ResourceRequest> {
     @Override
     public int compare(org.apache.hadoop.yarn.api.records.ResourceRequest r1,
         org.apache.hadoop.yarn.api.records.ResourceRequest r2) {
-      
+
       // Compare priority, host and capability
       int ret = r1.getPriority().compareTo(r2.getPriority());
       if (ret == 0) {
@@ -198,12 +199,12 @@ public class BuilderUtils {
       String nodeHttpAddress, Resource resource, Priority priority) {
     ContainerId containerID =
         newContainerId(recordFactory, appAttemptId, containerId);
-    return newContainer(containerID, nodeId, nodeHttpAddress, 
+    return newContainer(containerID, nodeId, nodeHttpAddress,
         resource, priority);
   }
 
   public static Container newContainer(ContainerId containerId,
-      NodeId nodeId, String nodeHttpAddress, 
+      NodeId nodeId, String nodeHttpAddress,
       Resource resource, Priority priority) {
     Container container = recordFactory.newRecordInstance(Container.class);
     container.setId(containerId);
@@ -242,8 +243,9 @@ public class BuilderUtils {
 
   public static ApplicationReport newApplicationReport(
       ApplicationId applicationId, String user, String queue, String name,
-      String host, int rpcPort, String clientToken, ApplicationState state,
-      String diagnostics, String url, long startTime, long finishTime) {
+      String host, int rpcPort, String clientToken, YarnApplicationState state,
+      String diagnostics, String url, long startTime, long finishTime,
+      FinalApplicationStatus finalStatus) {
     ApplicationReport report = recordFactory
         .newRecordInstance(ApplicationReport.class);
     report.setApplicationId(applicationId);
@@ -253,20 +255,21 @@ public class BuilderUtils {
     report.setHost(host);
     report.setRpcPort(rpcPort);
     report.setClientToken(clientToken);
-    report.setState(state);
+    report.setYarnApplicationState(state);
     report.setDiagnostics(diagnostics);
     report.setTrackingUrl(url);
     report.setStartTime(startTime);
     report.setFinishTime(finishTime);
+    report.setFinalApplicationStatus(finalStatus);
     return report;
   }
-  
+
   public static Resource newResource(int memory) {
     Resource resource = recordFactory.newRecordInstance(Resource.class);
     resource.setMemory(memory);
     return resource;
   }
-  
+
   public static URL newURL(String scheme, String host, int port, String file) {
     URL url = recordFactory.newRecordInstance(URL.class);
     url.setScheme(scheme);
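
The factory method newApplicationReport now takes the final status as an extra trailing argument. A hedged usage sketch with the argument order from the patch; appId, clientToken, startTime, and finishTime are assumed local variables, and all literal values are made up:

  ApplicationReport report = BuilderUtils.newApplicationReport(
      appId, "alice", "default", "wordcount",
      "am-host.example.com", 4242, clientToken,
      YarnApplicationState.FINISHED,
      "", "http://am-host.example.com:8080",
      startTime, finishTime,
      FinalApplicationStatus.SUCCEEDED);
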

+ 3 - 0
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java

@@ -33,7 +33,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.URL;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
@@ -181,4 +183,5 @@ public class ConverterUtils {
           + applicationAttmeptIdStr, n);
     }
   }
+  
 }

+ 26 - 20
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java

@@ -24,7 +24,8 @@ import java.util.List;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.util.Records;
 
 import com.google.common.collect.Iterators;
@@ -39,8 +40,8 @@ public class MockApps {
       "I18nApp<☯>");
   static final Iterator<String> USERS = Iterators.cycle("dorothy", "tinman",
       "scarecrow", "glinda", "nikko", "toto", "winkie", "zeke", "gulch");
-  static final Iterator<ApplicationState> STATES = Iterators.cycle(
-      ApplicationState.values());
+  static final Iterator<YarnApplicationState> STATES = Iterators.cycle(
+      YarnApplicationState.values());
   static final Iterator<String> QUEUES = Iterators.cycle("a.a1", "a.a2",
       "b.b1", "b.b2", "b.b3", "c.c1.c11", "c.c1.c12", "c.c1.c13",
       "c.c2", "c.c3", "c.c4");
@@ -74,46 +75,47 @@ public class MockApps {
 
   public static ApplicationReport newApp(int i) {
     final ApplicationId id = newAppID(i);
-    final ApplicationState state = newAppState();
+    final YarnApplicationState state = newAppState();
     final String user = newUserName();
     final String name = newAppName();
     final String queue = newQueue();
+    final FinalApplicationStatus finishState = FinalApplicationStatus.UNDEFINED;
     return new ApplicationReport() {
       @Override public ApplicationId getApplicationId() { return id; }
       @Override public String getUser() { return user; }
       @Override public String getName() { return name; }
-      @Override public ApplicationState getState() { return state; }
+      @Override public YarnApplicationState getYarnApplicationState() { return state; }
       @Override public String getQueue() { return queue; }
       @Override public String getTrackingUrl() { return ""; }
-      @Override
+      @Override public FinalApplicationStatus getFinalApplicationStatus() { return finishState; }
       public void setApplicationId(ApplicationId applicationId) {
         // TODO Auto-generated method stub
-        
+
       }
       @Override
       public void setTrackingUrl(String url) {
         // TODO Auto-generated method stub
-        
+
       }
       @Override
       public void setName(String name) {
         // TODO Auto-generated method stub
-        
+
      }
      @Override
      public void setQueue(String queue) {
        // TODO Auto-generated method stub
-        
+
      }
      @Override
-      public void setState(ApplicationState state) {
+      public void setYarnApplicationState(YarnApplicationState state) {
        // TODO Auto-generated method stub
-        
+
      }
      @Override
      public void setUser(String user) {
        // TODO Auto-generated method stub
-        
+
      }
      @Override
      public String getDiagnostics() {
@@ -123,7 +125,7 @@ public class MockApps {
      @Override
      public void setDiagnostics(String diagnostics) {
        // TODO Auto-generated method stub
-        
+
      }
      @Override
      public String getHost() {
@@ -133,7 +135,7 @@ public class MockApps {
      @Override
      public void setHost(String host) {
        // TODO Auto-generated method stub
-        
+
      }
      @Override
      public int getRpcPort() {
@@ -143,7 +145,7 @@ public class MockApps {
      @Override
      public void setRpcPort(int rpcPort) {
        // TODO Auto-generated method stub
-        
+
      }
      @Override
      public String getClientToken() {
@@ -153,9 +155,8 @@ public class MockApps {
      @Override
      public void setClientToken(String clientToken) {
        // TODO Auto-generated method stub
-        
+
      }
-      
      @Override
      public long getStartTime() {
        // TODO Auto-generated method stub
@@ -175,7 +176,11 @@ public class MockApps {
      @Override
      public void setFinishTime(long finishTime) {
        // TODO Auto-generated method stub
-        
+
+      }
+      @Override
+      public void setFinalApplicationStatus(FinalApplicationStatus finishState) {
+		// TODO Auto-generated method stub
      }
    };
  }
@@ -194,9 +199,10 @@ public class MockApps {
    return id;
  }
 
-  public static ApplicationState newAppState() {
+  public static YarnApplicationState newAppState() {
    synchronized(STATES) {
      return STATES.next();
    }
  }
+
 }

+ 7 - 7
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java

@@ -75,7 +75,7 @@ public class ApplicationMasterService extends AbstractService implements
       new ConcurrentHashMap<ApplicationAttemptId, AMResponse>();
       new ConcurrentHashMap<ApplicationAttemptId, AMResponse>();
   private final AMResponse reboot = recordFactory.newRecordInstance(AMResponse.class);
   private final AMResponse reboot = recordFactory.newRecordInstance(AMResponse.class);
   private final RMContext rmContext;
-  
+
   public ApplicationMasterService(RMContext rmContext,
       ApplicationTokenSecretManager appTokenManager, YarnScheduler scheduler) {
     super(ApplicationMasterService.class.getName());
@@ -105,12 +105,12 @@ public class ApplicationMasterService extends AbstractService implements
     this.server =
       rpc.getServer(AMRMProtocol.class, this, masterServiceAddress,
           serverConf, this.appTokenManager,
-          serverConf.getInt(YarnConfiguration.RM_SCHEDULER_CLIENT_THREAD_COUNT, 
+          serverConf.getInt(YarnConfiguration.RM_SCHEDULER_CLIENT_THREAD_COUNT,
              YarnConfiguration.DEFAULT_RM_SCHEDULER_CLIENT_THREAD_COUNT));
    this.server.start();
    super.start();
  }
-  
+
   @Override
   public RegisterApplicationMasterResponse registerApplicationMaster(
       RegisterApplicationMasterRequest request) throws YarnRemoteException {
@@ -123,7 +123,7 @@ public class ApplicationMasterService extends AbstractService implements
       String message = "Application doesn't exist in cache "
           + applicationAttemptId;
       LOG.error(message);
-      RMAuditLogger.logFailure(this.rmContext.getRMApps().get(appID).getUser(), 
+      RMAuditLogger.logFailure(this.rmContext.getRMApps().get(appID).getUser(),
          AuditConstants.REGISTER_AM, message, "ApplicationMasterService",
          "Error in registering application master", appID,
          applicationAttemptId);
@@ -141,7 +141,7 @@ public class ApplicationMasterService extends AbstractService implements
              .getHost(), request.getRpcPort(), request.getTrackingUrl()));
 
      RMAuditLogger.logSuccess(this.rmContext.getRMApps().get(appID).getUser(),
-          AuditConstants.REGISTER_AM, "ApplicationMasterService", appID, 
+          AuditConstants.REGISTER_AM, "ApplicationMasterService", appID,
          applicationAttemptId);
 
      // Pick up min/max resource from scheduler...
@@ -176,7 +176,7 @@ public class ApplicationMasterService extends AbstractService implements
 
      rmContext.getDispatcher().getEventHandler().handle(
          new RMAppAttemptUnregistrationEvent(applicationAttemptId, request
-              .getTrackingUrl(), request.getFinalState(), request
+              .getTrackingUrl(), request.getFinalApplicationStatus(), request
              .getDiagnostics()));
 
      FinishApplicationMasterResponse response = recordFactory
@@ -225,7 +225,7 @@ public class ApplicationMasterService extends AbstractService implements
      List<ContainerId> release = request.getReleaseList();
 
      // Send new requests to appAttempt.
-      Allocation allocation = 
+      Allocation allocation =
          this.rScheduler.allocate(appAttemptId, ask, release);
 
      RMApp app = this.rmContext.getRMApps().get(appAttemptId.getApplicationId());
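A minimal AM-side sketch (not part of this patch) of what the unregister RPC carries after this change, assuming an AMRMProtocol proxy named amRMProtocol and an ApplicationAttemptId named attemptId are already in hand; the record setters mirror the ones exercised by MockAM further down in this commit, and the tracking URL is a placeholder:

    // Sketch only: an AM reporting its final status while unregistering.
    FinishApplicationMasterRequest finishReq =
        Records.newRecord(FinishApplicationMasterRequest.class);
    finishReq.setAppAttemptId(attemptId);
    finishReq.setTrackingUrl("");      // placeholder; would point at the AM's history page
    finishReq.setDiagnostics("");      // empty diagnostics on a clean finish
    finishReq.setFinishApplicationStatus(FinalApplicationStatus.SUCCEEDED);
    amRMProtocol.finishApplicationMaster(finishReq);
    // The RM side shown above turns this into an RMAppAttemptUnregistrationEvent.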

+ 13 - 12
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.resourcemanager.rmapp;
 
 import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
@@ -28,8 +29,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.recovery.ApplicationsStore.
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
 
 /**
- * The read interface to an Application in the ResourceManager. Take a 
- * look at {@link RMAppImpl} for its implementation. This interface 
+ * The read interface to an Application in the ResourceManager. Take a
+ * look at {@link RMAppImpl} for its implementation. This interface
  * exposes methods to access various updates in application status/report.
  */
 public interface RMApp extends EventHandler<RMAppEvent> {
@@ -68,7 +69,7 @@ public interface RMApp extends EventHandler<RMAppEvent> {
   RMAppAttempt getRMAppAttempt(ApplicationAttemptId appAttemptId);
 
   /**
-   * Each Application is submitted to a queue decided by {@link 
+   * Each Application is submitted to a queue decided by {@link
    * ApplicationSubmissionContext#setQueue(String)}.
    * This method returns the queue to which an application was submitted.
   * @return the queue to which the application was submitted to.
@@ -76,7 +77,7 @@ public interface RMApp extends EventHandler<RMAppEvent> {
   String getQueue();
 
   /**
-   * The name of the application as set in {@link 
+   * The name of the application as set in {@link
    * ApplicationSubmissionContext#setApplicationName(String)}.
    * @return the name of the application.
    */
@@ -85,7 +86,7 @@ public interface RMApp extends EventHandler<RMAppEvent> {
   /**
    * {@link RMApp} can have multiple application attempts {@link RMAppAttempt}.
    * This method returns the current {@link RMAppAttempt}.
-   * @return the current {@link RMAppAttempt} 
+   * @return the current {@link RMAppAttempt}
   */
   RMAppAttempt getCurrentAppAttempt();
 
@@ -96,7 +97,7 @@ public interface RMApp extends EventHandler<RMAppEvent> {
   ApplicationReport createAndGetApplicationReport();
 
   /**
-   * Application level metadata is stored in {@link ApplicationStore} whicn 
+   * Application level metadata is stored in {@link ApplicationStore} whicn
    * can persist the information.
   * @return the {@link ApplicationStore}  for this {@link RMApp}.
   */
@@ -125,12 +126,12 @@ public interface RMApp extends EventHandler<RMAppEvent> {
   * @return the diagnostics information for the application master.
   */
   StringBuilder getDiagnostics();
-  
+
   /**
-   * The final state of the AM when unregistering as in 
-   * {@link FinishApplicationMasterRequest#setFinalState(String)}.
-   * @return the final state of the AM as set in 
-   * {@link FinishApplicationMasterRequest#setFinalState(String)}.
+   * The final finish state of the AM when unregistering as in
+   * {@link FinishApplicationMasterRequest#setFinishApplicationStatus(FinalApplicationStatus)}.
+   * @return the final finish state of the AM as set in
+   * {@link FinishApplicationMasterRequest#setFinishApplicationStatus(FinalApplicationStatus)}.
   */
-  String getAMFinalState();
+  FinalApplicationStatus getFinalApplicationStatus();
 }
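A hypothetical caller of the reworked interface (the variable app and the System.out logging are illustrative only); the point is that the lifecycle state and the AM-reported outcome are now two separate, independently meaningful values:

    RMAppState state = app.getState();                                  // NEW, RUNNING, FINISHED, KILLED, ...
    FinalApplicationStatus outcome = app.getFinalApplicationStatus();   // UNDEFINED, SUCCEEDED, FAILED, KILLED
    System.out.println("Application " + app.getApplicationId()
        + " state=" + state + " finalStatus=" + outcome);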

+ 46 - 21
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java

@@ -32,9 +32,10 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -94,7 +95,7 @@ public class RMAppImpl implements RMApp {
   private static final StateMachineFactory<RMAppImpl,
                                            RMAppState,
                                            RMAppEventType,
-                                           RMAppEvent> stateMachineFactory 
+                                           RMAppEvent> stateMachineFactory
                                = new StateMachineFactory<RMAppImpl,
                                            RMAppState,
                                            RMAppEventType,
@@ -160,7 +161,7 @@ public class RMAppImpl implements RMApp {
   public RMAppImpl(ApplicationId applicationId, RMContext rmContext,
       Configuration config, String name, String user, String queue,
       ApplicationSubmissionContext submissionContext, String clientTokenStr,
-      ApplicationStore appStore, 
+      ApplicationStore appStore,
       YarnScheduler scheduler, ApplicationMasterService masterService) {
 
     this.applicationId = applicationId;
@@ -194,18 +195,23 @@ public class RMAppImpl implements RMApp {
   }
 
   @Override
-  public String getAMFinalState() {
+  public FinalApplicationStatus getFinalApplicationStatus() {
     this.readLock.lock();
     try {
-      if (currentAttempt != null) {
-        return currentAttempt.getAMFinalState();
+      // finish state is obtained based on the state machine's current state 
+      // as a fall-back in case the application has not been unregistered 
+      // ( or if the app never unregistered itself )
+      // when the report is requested
+      if (currentAttempt != null 
+          && currentAttempt.getFinalApplicationStatus() != null) {
+        return currentAttempt.getFinalApplicationStatus();   
       }
-      return "UNKNOWN";
+      return createFinalApplicationStatus(this.stateMachine.getCurrentState());
     } finally {
       this.readLock.unlock();
     }
   }
-  
+
   @Override
   public RMAppState getState() {
     this.readLock.lock();
@@ -273,25 +279,43 @@ public class RMAppImpl implements RMApp {
     return this.appStore;
   }
 
-  private ApplicationState createApplicationState(RMAppState rmAppState) {
+  private YarnApplicationState createApplicationState(RMAppState rmAppState) {
     switch(rmAppState) {
     case NEW:
-      return ApplicationState.NEW;
+      return YarnApplicationState.NEW;
     case SUBMITTED:
     case ACCEPTED:
-      return ApplicationState.SUBMITTED;
+      return YarnApplicationState.SUBMITTED;
    case RUNNING:
-      return ApplicationState.RUNNING;
+      return YarnApplicationState.RUNNING;
    case FINISHED:
-      return ApplicationState.SUCCEEDED;
+      return YarnApplicationState.FINISHED;
    case KILLED:
-      return ApplicationState.KILLED;
+      return YarnApplicationState.KILLED;
+    case FAILED:
+      return YarnApplicationState.FAILED;
+    }
+    throw new YarnException("Unknown state passed!");
+  }
+
+  private FinalApplicationStatus createFinalApplicationStatus(RMAppState state) {
+    switch(state) {
+    case NEW:
+    case SUBMITTED:
+    case ACCEPTED:
+    case RUNNING:
+      return FinalApplicationStatus.UNDEFINED;    
+    // finished without a proper final state is the same as failed  
+    case FINISHED:
    case FAILED:
-      return ApplicationState.FAILED;
+      return FinalApplicationStatus.FAILED;
+    case KILLED:
+      return FinalApplicationStatus.KILLED;
    }
    throw new YarnException("Unknown state passed!");
  }
 
+  
   @Override
   public ApplicationReport createAndGetApplicationReport() {
     this.readLock.lock();
@@ -301,6 +325,7 @@ public class RMAppImpl implements RMApp {
       String trackingUrl = "N/A";
       String host = "N/A";
       int rpcPort = -1;
+      FinalApplicationStatus finishState = getFinalApplicationStatus();
       if (this.currentAttempt != null) {
         trackingUrl = this.currentAttempt.getTrackingUrl();
         clientToken = this.currentAttempt.getClientToken();
@@ -310,8 +335,8 @@ public class RMAppImpl implements RMApp {
       return BuilderUtils.newApplicationReport(this.applicationId, this.user,
           this.queue, this.name, host, rpcPort, clientToken,
           createApplicationState(this.stateMachine.getCurrentState()),
-          this.diagnostics.toString(), trackingUrl, 
-          this.startTime, this.finishTime);
+          this.diagnostics.toString(), trackingUrl,
+          this.startTime, this.finishTime, finishState);
     } finally {
       this.readLock.unlock();
     }
@@ -432,7 +457,7 @@ public class RMAppImpl implements RMApp {
     @SuppressWarnings("unchecked")
     @Override
     public void transition(RMAppImpl app, RMAppEvent event) {
-      app.handler.handle(new RMAppAttemptEvent(app.currentAttempt.getAppAttemptId(), 
+      app.handler.handle(new RMAppAttemptEvent(app.currentAttempt.getAppAttemptId(),
          RMAppAttemptEventType.KILL));
       super.transition(app, event);
     }
@@ -465,7 +490,7 @@ public class RMAppImpl implements RMApp {
       }
       app.finishTime = System.currentTimeMillis();
       app.handler.handle(
-          new RMAppManagerEvent(app.applicationId, 
+          new RMAppManagerEvent(app.applicationId,
          RMAppManagerEventType.APP_COMPLETED));
     };
   }
@@ -481,7 +506,7 @@ public class RMAppImpl implements RMApp {
 
     @Override
     public RMAppState transition(RMAppImpl app, RMAppEvent event) {
-      
+
       RMAppFailedAttemptEvent failedEvent = ((RMAppFailedAttemptEvent)event);
       if (app.attempts.size() == app.maxRetries) {
         String msg = "Application " + app.getApplicationId()
@@ -495,7 +520,7 @@ public class RMAppImpl implements RMApp {
         return RMAppState.FAILED;
       }
 
-      app.createNewAttempt();     
+      app.createNewAttempt();
       return initialState;
     }
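Illustration only (not part of the patch) of what the fallback above yields for an application whose AM never unregistered, assuming an RMApp instance named app is in scope:

    // RMAppState.NEW / SUBMITTED / ACCEPTED / RUNNING -> FinalApplicationStatus.UNDEFINED
    // RMAppState.FINISHED without an unregistration   -> FinalApplicationStatus.FAILED
    // RMAppState.FAILED                               -> FinalApplicationStatus.FAILED
    // RMAppState.KILLED                               -> FinalApplicationStatus.KILLED
    FinalApplicationStatus status = app.getFinalApplicationStatus();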
 
 

+ 12 - 10
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java

@@ -22,6 +22,7 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerStatus;
@@ -32,8 +33,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
 
 /**
  * Interface to an Application Attempt in the Resource Manager.
- * A {@link RMApp} can have multiple app attempts based on 
- * {@link YarnConfiguration#RM_AM_MAX_RETRIES}. For specific 
+ * A {@link RMApp} can have multiple app attempts based on
+ * {@link YarnConfiguration#RM_AM_MAX_RETRIES}. For specific
  * implementation take a look at {@link RMAppAttemptImpl}.
  */
 public interface RMAppAttempt extends EventHandler<RMAppAttemptEvent> {
@@ -49,7 +50,7 @@ public interface RMAppAttempt extends EventHandler<RMAppAttemptEvent> {
   * @return the state {@link RMAppAttemptState} of this {@link RMAppAttempt}
   */
  RMAppAttemptState getAppAttemptState();
-  
+
  /**
   * The host on which the {@link RMAppAttempt} is running/ran on.
   * @return the host on which the {@link RMAppAttempt} ran/is running on.
@@ -88,11 +89,12 @@ public interface RMAppAttempt extends EventHandler<RMAppAttemptEvent> {
  float getProgress();
 
  /**
-   * The final state set by the AM.
-   * @return the final state that is set by the AM when unregistering itself.
+   * The final status set by the AM.
+   * @return the final status that is set by the AM when unregistering itself. Can return a null 
+   * if the AM has not unregistered itself. 
   */
-  String getAMFinalState();
-  
+  FinalApplicationStatus getFinalApplicationStatus();
+
  /**
   * Nodes on which the containers for this {@link RMAppAttempt} ran.
   * @return the set of nodes that ran any containers from this {@link RMAppAttempt}
@@ -100,16 +102,16 @@ public interface RMAppAttempt extends EventHandler<RMAppAttemptEvent> {
  Set<NodeId> getRanNodes();
 
  /**
-   * Return a list of the last set of finished containers, resetting the 
+   * Return a list of the last set of finished containers, resetting the
   * finished containers to empty.
   * @return the list of just finished containers, re setting the finished containers.
   */
  List<ContainerStatus> pullJustFinishedContainers();
 
  /**
-   * Return the list of last set of finished containers. This does not reset the 
+   * Return the list of last set of finished containers. This does not reset the
   * finished containers.
-   * @return the list of just finished contianers, this does not reset the 
+   * @return the list of just finished contianers, this does not reset the
   * finished containers.
   */
  List<ContainerStatus> getJustFinishedContainers();
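Because getFinalApplicationStatus() may legitimately return null until the AM unregisters, a caller sketch (attempt is an assumed RMAppAttempt reference) guards for it, just as RMAppImpl above does by falling back to its state machine:

    FinalApplicationStatus attemptStatus = attempt.getFinalApplicationStatus();
    if (attemptStatus == null) {
      // AM is still running, or it exited without ever unregistering
      attemptStatus = FinalApplicationStatus.UNDEFINED;
    }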

+ 30 - 28
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java

@@ -31,6 +31,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.Container;
@@ -99,9 +100,9 @@ public class RMAppAttemptImpl implements RMAppAttempt {
   private final ApplicationSubmissionContext submissionContext;
 
   //nodes on while this attempt's containers ran
-  private final Set<NodeId> ranNodes = 
+  private final Set<NodeId> ranNodes =
     new HashSet<NodeId>();
-  private final List<ContainerStatus> justFinishedContainers = 
+  private final List<ContainerStatus> justFinishedContainers =
     new ArrayList<ContainerStatus>();
   private Container masterContainer;
 
@@ -109,7 +110,9 @@ public class RMAppAttemptImpl implements RMAppAttempt {
   private String host = "N/A";
   private int rpcPort;
   private String trackingUrl = "N/A";
-  private String finalState = "N/A";
+  // Set to null initially. Will eventually get set 
+  // if an RMAppAttemptUnregistrationEvent occurs
+  private FinalApplicationStatus finalStatus = null;
   private final StringBuilder diagnostics = new StringBuilder();
 
   private static final StateMachineFactory<RMAppAttemptImpl,
@@ -150,7 +153,7 @@ public class RMAppAttemptImpl implements RMAppAttempt {
       .addTransition(RMAppAttemptState.ALLOCATED,
           RMAppAttemptState.ALLOCATED,
           RMAppAttemptEventType.CONTAINER_ACQUIRED,
-          new ContainerAcquiredTransition())       
+          new ContainerAcquiredTransition())
       .addTransition(RMAppAttemptState.ALLOCATED, RMAppAttemptState.LAUNCHED,
           RMAppAttemptEventType.LAUNCHED, new AMLaunchedTransition())
       .addTransition(RMAppAttemptState.ALLOCATED, RMAppAttemptState.FAILED,
@@ -266,12 +269,12 @@ public class RMAppAttemptImpl implements RMAppAttempt {
   public ApplicationSubmissionContext getSubmissionContext() {
     return this.submissionContext;
   }
-  
+
   @Override
-  public String getAMFinalState() {
+  public FinalApplicationStatus getFinalApplicationStatus() {
     this.readLock.lock();
     try {
-      return this.finalState;
+      return this.finalStatus;
     } finally {
       this.readLock.unlock();
     }
@@ -430,9 +433,9 @@ public class RMAppAttemptImpl implements RMAppAttempt {
 
     @Override
     public void transition(RMAppAttemptImpl appAttempt,
-        RMAppAttemptEvent event) {      
+        RMAppAttemptEvent event) {
     }
-    
+
   }
 
   private static final class AttemptStartedTransition extends BaseTransition {
@@ -459,23 +462,23 @@ public class RMAppAttemptImpl implements RMAppAttempt {
         RMAppAttemptEvent event) {
 
       RMAppAttemptRejectedEvent rejectedEvent = (RMAppAttemptRejectedEvent) event;
-      
+
       // Save the diagnostic message
       String message = rejectedEvent.getMessage();
       appAttempt.setDiagnostics(message);
-      
+
       // Send the rejection event to app
       appAttempt.eventHandler.handle(
           new RMAppRejectedEvent(
-              rejectedEvent.getApplicationAttemptId().getApplicationId(), 
+              rejectedEvent.getApplicationAttemptId().getApplicationId(),
              message)
          );
     }
   }
 
-  private static final List<ContainerId> EMPTY_CONTAINER_RELEASE_LIST = 
+  private static final List<ContainerId> EMPTY_CONTAINER_RELEASE_LIST =
      new ArrayList<ContainerId>();
-  private static final List<ResourceRequest> EMPTY_CONTAINER_REQUEST_LIST = 
+  private static final List<ResourceRequest> EMPTY_CONTAINER_REQUEST_LIST =
    new ArrayList<ResourceRequest>();
 
   private static final class ScheduleTransition extends BaseTransition {
@@ -540,23 +543,23 @@ public class RMAppAttemptImpl implements RMAppAttempt {
       switch (finalAttemptState) {
         case FINISHED:
         {
-          appEvent = 
+          appEvent =
              new RMAppEvent(applicationId, RMAppEventType.ATTEMPT_FINISHED);
         }
         break;
         case KILLED:
         {
-          appEvent = 
-              new RMAppFailedAttemptEvent(applicationId, 
-                  RMAppEventType.ATTEMPT_KILLED, 
+          appEvent =
+              new RMAppFailedAttemptEvent(applicationId,
+                  RMAppEventType.ATTEMPT_KILLED,
                   "Application killed by user.");
         }
         break;
         case FAILED:
         {
-          appEvent = 
-              new RMAppFailedAttemptEvent(applicationId, 
-                  RMAppEventType.ATTEMPT_FAILED, 
+          appEvent =
+              new RMAppFailedAttemptEvent(applicationId,
+                  RMAppEventType.ATTEMPT_FAILED,
                   appAttempt.getDiagnostics());
         }
         break;
@@ -566,7 +569,7 @@ public class RMAppAttemptImpl implements RMAppAttempt {
         }
         break;
       }
-      
+
       appAttempt.eventHandler.handle(appEvent);
       appAttempt.eventHandler.handle(new AppRemovedSchedulerEvent(appAttempt
           .getAppAttemptId(), finalAttemptState));
@@ -657,7 +660,7 @@ public class RMAppAttemptImpl implements RMAppAttempt {
 
       RMAppAttemptContainerFinishedEvent finishEvent =
           ((RMAppAttemptContainerFinishedEvent)event);
-      
+
       // UnRegister from AMLivelinessMonitor
       appAttempt.rmContext.getAMLivelinessMonitor().unregister(
           appAttempt.getAppAttemptId());
@@ -666,7 +669,7 @@ public class RMAppAttemptImpl implements RMAppAttempt {
       ContainerStatus status = finishEvent.getContainerStatus();
       appAttempt.diagnostics.append("AM Container for " +
           appAttempt.getAppAttemptId() + " exited with " +
-          " exitCode: " + status.getExitStatus() + 
+          " exitCode: " + status.getExitStatus() +
           " due to: " +  status.getDiagnostics() + "." +
           "Failing this attempt.");
 
@@ -730,10 +733,9 @@ public class RMAppAttemptImpl implements RMAppAttempt {
 
       RMAppAttemptUnregistrationEvent unregisterEvent
         = (RMAppAttemptUnregistrationEvent) event;
-      unregisterEvent.getFinalState();
       appAttempt.diagnostics.append(unregisterEvent.getDiagnostics());
       appAttempt.trackingUrl = unregisterEvent.getTrackingUrl();
-      appAttempt.finalState = unregisterEvent.getFinalState();
+      appAttempt.finalStatus = unregisterEvent.getFinalApplicationStatus();
 
       // Tell the app and the scheduler
       super.transition(appAttempt, event);
@@ -761,7 +763,7 @@ public class RMAppAttemptImpl implements RMAppAttempt {
 
       RMAppAttemptContainerFinishedEvent containerFinishedEvent
         = (RMAppAttemptContainerFinishedEvent) event;
-      ContainerStatus containerStatus = 
+      ContainerStatus containerStatus =
          containerFinishedEvent.getContainerStatus();
 
       // Is this container the AmContainer? If the finished container is same as
@@ -771,7 +773,7 @@ public class RMAppAttemptImpl implements RMAppAttempt {
         // Setup diagnostic message
         appAttempt.diagnostics.append("AM Container for " +
             appAttempt.getAppAttemptId() + " exited with " +
-            " exitCode: " + containerStatus.getExitStatus() + 
+            " exitCode: " + containerStatus.getExitStatus() +
             " due to: " +  containerStatus.getDiagnostics() + "." +
             "Failing this attempt.");
 
 

+ 7 - 6
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/event/RMAppAttemptUnregistrationEvent.java

@@ -19,20 +19,21 @@
 package org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event;
 
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
 
 public class RMAppAttemptUnregistrationEvent extends RMAppAttemptEvent {
 
   private final String trackingUrl;
-  private final String finalState;
+  private final FinalApplicationStatus finalStatus;
   private final String diagnostics;
 
   public RMAppAttemptUnregistrationEvent(ApplicationAttemptId appAttemptId,
-      String trackingUrl, String finalState, String diagnostics) {
+      String trackingUrl, FinalApplicationStatus finalStatus, String diagnostics) {
     super(appAttemptId, RMAppAttemptEventType.UNREGISTERED);
     this.trackingUrl = trackingUrl;
-    this.finalState = finalState;
+    this.finalStatus = finalStatus;
     this.diagnostics = diagnostics;
   }
 
@@ -40,12 +41,12 @@ public class RMAppAttemptUnregistrationEvent extends RMAppAttemptEvent {
     return this.trackingUrl;
   }
 
-  public String getFinalState() {
-    return this.finalState;
+  public FinalApplicationStatus getFinalApplicationStatus() {
+    return this.finalStatus;
   }
 
   public String getDiagnostics() {
     return this.diagnostics;
   }
 
 
-}
+}

+ 3 - 2
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java

@@ -50,6 +50,7 @@ class AppsBlock extends HtmlBlock {
             th(".name", "Name").
             th(".queue", "Queue").
             th(".state", "State").
+            th(".finalstatus", "FinalStatus").
             th(".progress", "Progress").
             th(".ui", "Tracking UI").
             th(".note", "Note")._()._().
@@ -70,8 +71,8 @@
           td(app.getUser().toString()).
           td(app.getName().toString()).
           td(app.getQueue().toString()).
-          td(app.getState() == RMAppState.FINISHED ? app.getAMFinalState() : 
-            app.getState().toString()).
+          td(app.getState().toString()).
+          td(app.getFinalApplicationStatus().toString()).
           td().
             br().$title(percent)._(). // for sorting
             div(_PROGRESSBAR).

+ 2 - 3
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java

@@ -89,9 +89,8 @@ public class RmController extends Controller {
     ResponseInfo info = info("Application Overview").
       _("User:", app.getUser()).
       _("Name:", app.getName()).
-      _("State:", (app.getState() == RMAppState.FINISHED ?
-        app.getAMFinalState() : app.getState().toString())
-      ).
+      _("State:", app.getState().toString()).
+      _("FinalStatus:", app.getFinalApplicationStatus().toString()).
       _("Started:", Times.format(app.getStartTime())).
       _("Elapsed:", StringUtils.formatTime(
         Times.elapsed(app.getStartTime(), app.getFinishTime()))).

+ 16 - 16
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockAM.java

@@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.records.AMResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.api.records.Resource;
@@ -47,11 +47,11 @@ public class MockAM {
   private final ApplicationAttemptId attemptId;
   private final RMContext context;
   private final AMRMProtocol amRMProtocol;
-  
+
   private final List<ResourceRequest> requests = new ArrayList<ResourceRequest>();
   private final List<ContainerId> releases = new ArrayList<ContainerId>();
 
-  MockAM(RMContext context, AMRMProtocol amRMProtocol, 
+  MockAM(RMContext context, AMRMProtocol amRMProtocol,
       ApplicationAttemptId attemptId) {
     this.context = context;
     this.amRMProtocol = amRMProtocol;
@@ -85,7 +85,7 @@ public class MockAM {
     amRMProtocol.registerApplicationMaster(req);
   }
 
-  public void addRequests(String[] hosts, int memory, int priority, 
+  public void addRequests(String[] hosts, int memory, int priority,
       int containers) throws Exception {
     requests.addAll(createReq(hosts, memory, priority, containers));
   }
@@ -97,33 +97,33 @@ public class MockAM {
     return response;
   }
 
-  public AMResponse allocate( 
-      String host, int memory, int numContainers, 
+  public AMResponse allocate(
+      String host, int memory, int numContainers,
       List<ContainerId> releases) throws Exception {
-    List reqs = createReq(new String[]{host}, memory, 1, numContainers);
+    List<ResourceRequest> reqs = createReq(new String[]{host}, memory, 1, numContainers);
     return allocate(reqs, releases);
   }
 
-  public List<ResourceRequest> createReq(String[] hosts, int memory, int priority, 
+  public List<ResourceRequest> createReq(String[] hosts, int memory, int priority,
       int containers) throws Exception {
     List<ResourceRequest> reqs = new ArrayList<ResourceRequest>();
     for (String host : hosts) {
-      ResourceRequest hostReq = createResourceReq(host, memory, priority, 
+      ResourceRequest hostReq = createResourceReq(host, memory, priority,
           containers);
       reqs.add(hostReq);
-      ResourceRequest rackReq = createResourceReq("default-rack", memory, 
+      ResourceRequest rackReq = createResourceReq("default-rack", memory,
           priority, containers);
       reqs.add(rackReq);
     }
-    
-    ResourceRequest offRackReq = createResourceReq("*", memory, priority, 
+
+    ResourceRequest offRackReq = createResourceReq("*", memory, priority,
         containers);
     reqs.add(offRackReq);
     return reqs;
-    
+
   }
 
-  public ResourceRequest createResourceReq(String resource, int memory, int priority, 
+  public ResourceRequest createResourceReq(String resource, int memory, int priority,
       int containers) throws Exception {
     ResourceRequest req = Records.newRecord(ResourceRequest.class);
     req.setHostName(resource);
@@ -138,7 +138,7 @@ public class MockAM {
   }
 
   public AMResponse allocate(
-      List<ResourceRequest> resourceRequest, List<ContainerId> releases) 
+      List<ResourceRequest> resourceRequest, List<ContainerId> releases)
       throws Exception {
     AllocateRequest req = BuilderUtils.newAllocateRequest(attemptId,
         ++responseId, 0F, resourceRequest, releases);
@@ -151,7 +151,7 @@ public class MockAM {
     FinishApplicationMasterRequest req = Records.newRecord(FinishApplicationMasterRequest.class);
     req.setAppAttemptId(attemptId);
     req.setDiagnostics("");
-    req.setFinalState("");
+    req.setFinishApplicationStatus(FinalApplicationStatus.SUCCEEDED);
     req.setTrackingUrl("");
     amRMProtocol.finishApplicationMaster(req);
   }
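For test code the migration is mechanical; a before/after sketch (req is the FinishApplicationMasterRequest built above):

    // before: free-form string with no agreed-upon values
    //   req.setFinalState("");
    // after: one of the typed constants used elsewhere in this commit
    //   (UNDEFINED, SUCCEEDED, FAILED, KILLED)
    req.setFinishApplicationStatus(FinalApplicationStatus.SUCCEEDED);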

+ 12 - 6
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java

@@ -22,10 +22,11 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.yarn.MockApps;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationMaster;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationStatus;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -69,7 +70,7 @@ public abstract class MockAsm extends MockApps {
     }
 
     @Override
-    public ApplicationState getState() {
+    public YarnApplicationState getState() {
       throw new UnsupportedOperationException("Not supported yet.");
     }
 
@@ -119,7 +120,7 @@ public abstract class MockAsm extends MockApps {
     }
 
     @Override
-    public void setState(ApplicationState state) {
+    public void setState(YarnApplicationState state) {
       throw new UnsupportedOperationException("Not supported yet.");
     }
 
@@ -207,11 +208,11 @@ public abstract class MockAsm extends MockApps {
     }
     @Override
     public void handle(RMAppEvent event) {
-      throw new UnsupportedOperationException("Not supported yet.");      
+      throw new UnsupportedOperationException("Not supported yet.");
     }
 
     @Override
-    public String getAMFinalState() {
+    public FinalApplicationStatus getFinalApplicationStatus() {
       throw new UnsupportedOperationException("Not supported yet.");
     }
   }
@@ -274,9 +275,14 @@ public abstract class MockAsm extends MockApps {
       public float getProgress() {
         return (float)Math.random();
       }
+      @Override
+      public FinalApplicationStatus getFinalApplicationStatus() {
+        return FinalApplicationStatus.UNDEFINED;
+      }
+      
     };
   }
-  
+
   public static List<RMApp> newApplications(int n) {
     List<RMApp> list = Lists.newArrayList();
     for (int i = 0; i < n; ++i) {

+ 5 - 5
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMLaunchFailure.java

@@ -32,7 +32,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationMaster;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.Priority;
@@ -65,7 +65,7 @@ public class TestAMLaunchFailure {
 //  private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 //  ApplicationsManagerImpl asmImpl;
 //  YarnScheduler scheduler = new DummyYarnScheduler();
-//  ApplicationTokenSecretManager applicationTokenSecretManager = 
+//  ApplicationTokenSecretManager applicationTokenSecretManager =
 //    new ApplicationTokenSecretManager();
 //  private ClientRMService clientService;
 //
@@ -98,7 +98,7 @@ public class TestAMLaunchFailure {
 //        , ApplicationStore appStore)
 //        throws IOException {
 //      // TODO Auto-generated method stub
-//      
+//
 //    }
 //
 //    @Override
@@ -199,7 +199,7 @@ public class TestAMLaunchFailure {
 //    conf.setLong(YarnConfiguration.AM_EXPIRY_INTERVAL, 3000L);
 //    conf.setInt(RMConfig.AM_MAX_RETRIES, 1);
 //    asmImpl.init(conf);
-//    asmImpl.start();  
+//    asmImpl.start();
 //  }
 //
 //  @After
@@ -221,7 +221,7 @@ public class TestAMLaunchFailure {
 //        .newRecordInstance(SubmitApplicationRequest.class);
 //    request.setApplicationSubmissionContext(submissionContext);
 //    clientService.submitApplication(request);
-//    AppAttempt application = context.getApplications().get(appID); 
+//    AppAttempt application = context.getApplications().get(appID);
 //
 //    while (application.getState() != ApplicationState.FAILED) {
 //      LOG.info("Waiting for application to go to FAILED state."

+ 12 - 12
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMRestart.java

@@ -33,7 +33,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationMaster;
 import org.apache.hadoop.yarn.api.records.ApplicationMaster;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -75,7 +75,7 @@ public class TestAMRestart {
 //  private static final Log LOG = LogFactory.getLog(TestAMRestart.class);
 //  private static final Log LOG = LogFactory.getLog(TestAMRestart.class);
 //  ApplicationsManagerImpl appImpl;
 //  ApplicationsManagerImpl appImpl;
 //  RMContext asmContext = new RMContextImpl(new MemStore());
 //  RMContext asmContext = new RMContextImpl(new MemStore());
-//  ApplicationTokenSecretManager appTokenSecretManager = 
+//  ApplicationTokenSecretManager appTokenSecretManager =
 //    new ApplicationTokenSecretManager();
 //    new ApplicationTokenSecretManager();
 //  DummyResourceScheduler scheduler;
 //  DummyResourceScheduler scheduler;
 //  private ClientRMService clientRMService;
 //  private ClientRMService clientRMService;
@@ -90,7 +90,7 @@ public class TestAMRestart {
 //  int launcherLaunchCalled = 0;
 //  int launcherLaunchCalled = 0;
 //  int launcherCleanupCalled = 0;
 //  int launcherCleanupCalled = 0;
 //  private final static RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 //  private final static RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
-//  
+//
 //  private class ExtApplicationsManagerImpl extends ApplicationsManagerImpl {
 //  private class ExtApplicationsManagerImpl extends ApplicationsManagerImpl {
 //    public ExtApplicationsManagerImpl(
 //    public ExtApplicationsManagerImpl(
 //        ApplicationTokenSecretManager applicationTokenSecretManager,
 //        ApplicationTokenSecretManager applicationTokenSecretManager,
@@ -115,7 +115,7 @@ public class TestAMRestart {
 //            LOG.info("DEBUG -- waiting for launch");
 //            LOG.info("DEBUG -- waiting for launch");
 //            synchronized(launchNotify) {
 //            synchronized(launchNotify) {
 //              while (launchNotify.get() == 0) {
 //              while (launchNotify.get() == 0) {
-//                try { 
+//                try {
 //                  launchNotify.wait();
 //                  launchNotify.wait();
 //                } catch (InterruptedException e) {
 //                } catch (InterruptedException e) {
 //                }
 //                }
@@ -151,11 +151,11 @@ public class TestAMRestart {
 //  }
 //  }
 //
 //
 //  private class DummyResourceScheduler implements ResourceScheduler {
 //  private class DummyResourceScheduler implements ResourceScheduler {
-//   
+//
 //    @Override
 //    @Override
 //    public void removeNode(RMNode node) {
 //    public void removeNode(RMNode node) {
 //    }
 //    }
-//    
+//
 //    @Override
 //    @Override
 //    public Allocation allocate(ApplicationId applicationId,
 //    public Allocation allocate(ApplicationId applicationId,
 //        List<ResourceRequest> ask, List<Container> release) throws IOException {
 //        List<ResourceRequest> ask, List<Container> release) throws IOException {
@@ -222,7 +222,7 @@ public class TestAMRestart {
 //
 //
 //    @Override
 //    @Override
 //    public void nodeUpdate(RMNode nodeInfo,
 //    public void nodeUpdate(RMNode nodeInfo,
-//        Map<String, List<Container>> containers) {      
+//        Map<String, List<Container>> containers) {
 //    }
 //    }
 //
 //
 //    @Override
 //    @Override
@@ -253,7 +253,7 @@ public class TestAMRestart {
 //    asmContext.getDispatcher().start();
 //    asmContext.getDispatcher().start();
 //    asmContext.getDispatcher().register(ApplicationTrackerEventType.class, scheduler);
 //    asmContext.getDispatcher().register(ApplicationTrackerEventType.class, scheduler);
 //    appImpl = new ExtApplicationsManagerImpl(appTokenSecretManager, scheduler, asmContext);
 //    appImpl = new ExtApplicationsManagerImpl(appTokenSecretManager, scheduler, asmContext);
-//    
+//
 //    conf.setLong(YarnConfiguration.AM_EXPIRY_INTERVAL, 1000L);
 //    conf.setLong(YarnConfiguration.AM_EXPIRY_INTERVAL, 1000L);
 //    conf.setInt(RMConfig.AM_MAX_RETRIES, maxFailures);
 //    conf.setInt(RMConfig.AM_MAX_RETRIES, maxFailures);
 //    appImpl.init(conf);
 //    appImpl.init(conf);
@@ -261,7 +261,7 @@ public class TestAMRestart {
 //
 //
 //    this.clientRMService = new ClientRMService(asmContext, appImpl
 //    this.clientRMService = new ClientRMService(asmContext, appImpl
 //        .getAmLivelinessMonitor(), appImpl.getClientToAMSecretManager(),
 //        .getAmLivelinessMonitor(), appImpl.getClientToAMSecretManager(),
-//        scheduler); 
+//        scheduler);
 //    this.clientRMService.init(conf);
 //    this.clientRMService.init(conf);
 //  }
 //  }
 //
 //
@@ -269,7 +269,7 @@ public class TestAMRestart {
 //  public void tearDown() {
 //  public void tearDown() {
 //  }
 //  }
 //
 //
-//  private void waitForFailed(AppAttempt application, ApplicationState 
+//  private void waitForFailed(AppAttempt application, ApplicationState
 //      finalState) throws Exception {
 //      finalState) throws Exception {
 //    int count = 0;
 //    int count = 0;
 //    while(application.getState() != finalState && count < 10) {
 //    while(application.getState() != finalState && count < 10) {
@@ -292,7 +292,7 @@ public class TestAMRestart {
 //        .newRecordInstance(SubmitApplicationRequest.class);
 //    request.setApplicationSubmissionContext(subContext);
 //    clientRMService.submitApplication(request);
-//    AppAttempt application = asmContext.getApplications().get(appID); 
+//    AppAttempt application = asmContext.getApplications().get(appID);
 //    synchronized (schedulerNotify) {
 //      while(schedulerNotify.get() == 0) {
 //        schedulerNotify.wait();
@@ -306,4 +306,4 @@ public class TestAMRestart {
 //    waitForFailed(application, ApplicationState.FAILED);
 //    stop = true;
 //  }
-}
+}

+ 7 - 7
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestASMStateMachine.java

@@ -26,7 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.event.EventHandler;
@@ -152,7 +152,7 @@ public class TestASMStateMachine {
 //    }
 //  }
 //
-//  private void waitForState( ApplicationState 
+//  private void waitForState( ApplicationState
 //      finalState, AppAttemptImpl masterInfo) throws Exception {
 //    int count = 0;
 //    while(masterInfo.getState() != finalState && count < 10) {
@@ -160,10 +160,10 @@ public class TestASMStateMachine {
 //      count++;
 //    }
 //    Assert.assertEquals(finalState, masterInfo.getState());
-//  } 
-//  
-//  /* Test the state machine. 
-//   * 
+//  }
+//
+//  /* Test the state machine.
+//   *
 //   */
 //  @Test
 //  public void testStateMachine() throws Exception {
@@ -211,6 +211,6 @@ public class TestASMStateMachine {
 //    /* check if expiry doesnt make it failed */
 //    handler.handle(new ApplicationEvent(ApplicationEventType.EXPIRE,
 //        masterInfo.getApplicationID()));
-//    Assert.assertEquals(ApplicationState.COMPLETED, masterInfo.getState());   
+//    Assert.assertEquals(ApplicationState.COMPLETED, masterInfo.getState());
 //  }
 }

+ 12 - 12
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestApplicationMasterExpiry.java

@@ -26,7 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.EventHandler;
@@ -44,21 +44,21 @@ import org.junit.Test;
 
 
 /**
  * A test case that tests the expiry of the application master.
- * More tests can be added to this. 
+ * More tests can be added to this.
  */
 public class TestApplicationMasterExpiry {
 //  private static final Log LOG = LogFactory.getLog(TestApplicationMasterExpiry.class);
 //  private static RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
-//  
+//
 //  private final RMContext context = new RMContextImpl(new MemStore());
 //  private AMLivelinessMonitor amLivelinessMonitor;
-//  
+//
 //  @Before
 //  public void setUp() {
 //    new DummyApplicationTracker();
 //    new DummySN();
 //    new DummyLauncher();
-//    new ApplicationEventTypeListener(); 
+//    new ApplicationEventTypeListener();
 //    Configuration conf = new Configuration();
 //    context.getDispatcher().register(ApplicationEventType.class,
 //        new ResourceManager.ApplicationEventDispatcher(context));
@@ -70,7 +70,7 @@ public class TestApplicationMasterExpiry {
 //    amLivelinessMonitor.init(conf);
 //    amLivelinessMonitor.start();
 //  }
-//  
+//
 //  private class DummyApplicationTracker implements EventHandler<ASMEvent<ApplicationTrackerEventType>> {
 //    DummyApplicationTracker() {
 //      context.getDispatcher().register(ApplicationTrackerEventType.class, this);
@@ -79,10 +79,10 @@ public class TestApplicationMasterExpiry {
 //    public void handle(ASMEvent<ApplicationTrackerEventType> event) {
 //    }
 //  }
-//  
+//
 //  private AtomicInteger expiry = new AtomicInteger();
 //  private boolean expired = false;
-//  
+//
 //  private class ApplicationEventTypeListener implements
 //      EventHandler<ApplicationEvent> {
 //    ApplicationEventTypeListener() {
@@ -100,7 +100,7 @@ public class TestApplicationMasterExpiry {
 //      }
 //    }
 //  }
-// 
+//
 //  private class DummySN implements EventHandler<ASMEvent<SNEventType>> {
 //    DummySN() {
 //      context.getDispatcher().register(SNEventType.class, this);
@@ -109,7 +109,7 @@ public class TestApplicationMasterExpiry {
 //    public void handle(ASMEvent<SNEventType> event) {
 //    }
 //  }
-//  
+//
 //  private class DummyLauncher implements EventHandler<ASMEvent<AMLauncherEventType>> {
 //    DummyLauncher() {
 //      context.getDispatcher().register(AMLauncherEventType.class, this);
@@ -118,8 +118,8 @@ public class TestApplicationMasterExpiry {
 //    public void handle(ASMEvent<AMLauncherEventType> event) {
 //    }
 //  }
-//  
-//  private void waitForState(AppAttempt application, ApplicationState 
+//
+//  private void waitForState(AppAttempt application, ApplicationState
 //      finalState) throws Exception {
 //    int count = 0;
 //    while(application.getState() != finalState && count < 10) {

+ 9 - 9
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestSchedulerNegotiator.java

@@ -28,7 +28,7 @@ import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationMaster;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -62,7 +62,7 @@ public class TestSchedulerNegotiator {
 //  private SchedulerNegotiator schedulerNegotiator;
 //  private DummyScheduler scheduler;
 //  private final int testNum = 99999;
-//  
+//
 //  private final RMContext context = new RMContextImpl(new MemStore());
 //  AppAttemptImpl masterInfo;
 //  private EventHandler handler;
@@ -79,13 +79,13 @@ public class TestSchedulerNegotiator {
 //      containers.add(container);
 //      return new Allocation(containers, Resources.none());
 //    }
-//  
-//  
+//
+//
 //    @Override
 //    public void nodeUpdate(RMNode nodeInfo,
 //        Map<String, List<Container>> containers) {
 //    }
-//    
+//
 //    @Override
 //    public void removeNode(RMNode node) {
 //    }
@@ -142,7 +142,7 @@ public class TestSchedulerNegotiator {
 //      return null;
 //    }
 //  }
-//  
+//
 //  @Before
 //  public void setUp() {
 //    scheduler = new DummyScheduler();
@@ -153,12 +153,12 @@ public class TestSchedulerNegotiator {
 //    context.getDispatcher().init(conf);
 //    context.getDispatcher().start();
 //  }
-//  
+//
 //  @After
 //  public void tearDown() {
 //    schedulerNegotiator.stop();
 //  }
-//  
+//
 //  public void waitForState(ApplicationState state, AppAttemptImpl info) {
 //    int count = 0;
 //    while (info.getState() != state && count < 100) {
@@ -184,7 +184,7 @@ public class TestSchedulerNegotiator {
 //    submissionContext.setApplicationId(recordFactory.newRecordInstance(ApplicationId.class));
 //    submissionContext.getApplicationId().setClusterTimestamp(System.currentTimeMillis());
 //    submissionContext.getApplicationId().setId(1);
-//    
+//
 //    masterInfo = new AppAttemptImpl(this.context, this.conf, "dummy",
 //        submissionContext, "dummyClientToken", StoreFactory
 //            .createVoidAppStore(), new AMLivelinessMonitor(context

+ 3 - 2
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/MockRMApp.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.yarn.server.resourcemanager.rmapp;
 
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.MockApps;
@@ -167,8 +168,8 @@ public class MockRMApp implements RMApp {
   }
 
   @Override
-  public String getAMFinalState() {
-    return "UNKNOWN";
+  public FinalApplicationStatus getFinalApplicationStatus() {
+    return FinalApplicationStatus.UNDEFINED;
   };
 
 }
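Editor's note: the MockRMApp hunk above replaces the old String-valued getAMFinalState() with the new FinalApplicationStatus enum, returning UNDEFINED as the mock's default. A minimal sketch of how calling code can interpret the enum, assuming only the values visible in this patch (UNDEFINED, SUCCEEDED, FAILED, KILLED); the helper class and method below are illustrative, not part of the commit:

    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;

    public class FinalStatusExample {
      // Hypothetical helper: an app counts as successful only when the AM
      // explicitly reported SUCCEEDED; UNDEFINED means it has not finished yet.
      static boolean didSucceed(FinalApplicationStatus status) {
        return status == FinalApplicationStatus.SUCCEEDED;
      }

      public static void main(String[] args) {
        System.out.println(didSucceed(FinalApplicationStatus.UNDEFINED)); // false
        System.out.println(didSucceed(FinalApplicationStatus.SUCCEEDED)); // true
      }
    }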

+ 12 - 1
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.MockApps;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.EventHandler;
@@ -192,10 +193,15 @@ public class TestRMAppTransitions {
   }
 
   private static void assertAppState(RMAppState state, RMApp application) {
-    Assert.assertEquals("application state should have been" + state, 
+    Assert.assertEquals("application state should have been " + state, 
         state, application.getState());
   }
 
 
+  private static void assertFinalAppStatus(FinalApplicationStatus status, RMApp application) {
+    Assert.assertEquals("Final application status should have been " + status, 
+        status, application.getFinalApplicationStatus());
+  }
+  
   // test to make sure times are set when app finishes
   private static void assertTimesAtFinish(RMApp application) {
     assertStartTimeSet(application);
@@ -208,6 +214,7 @@ public class TestRMAppTransitions {
   private static void assertKilled(RMApp application) {
     assertTimesAtFinish(application);
     assertAppState(RMAppState.KILLED, application);
+    assertFinalAppStatus(FinalApplicationStatus.KILLED, application);
     StringBuilder diag = application.getDiagnostics();
     Assert.assertEquals("application diagnostics is not correct",
         "Application killed by user.", diag.toString());
@@ -224,6 +231,7 @@ public class TestRMAppTransitions {
   private static void assertFailed(RMApp application, String regex) {
     assertTimesAtFinish(application);
     assertAppState(RMAppState.FAILED, application);
+    assertFinalAppStatus(FinalApplicationStatus.FAILED, application);
     StringBuilder diag = application.getDiagnostics();
     Assert.assertTrue("application diagnostics is not correct",
         diag.toString().matches(regex));
@@ -261,6 +269,7 @@ public class TestRMAppTransitions {
     application.handle(event);
     assertStartTimeSet(application);
     assertAppState(RMAppState.RUNNING, application);
+    assertFinalAppStatus(FinalApplicationStatus.UNDEFINED, application);
     return application;
   }
 
 
@@ -273,6 +282,8 @@ public class TestRMAppTransitions {
     application.handle(event);
     assertAppState(RMAppState.FINISHED, application);
     assertTimesAtFinish(application);
+    // finished without a proper unregister implies failed
+    assertFinalAppStatus(FinalApplicationStatus.FAILED, application);
     return application;
   }
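Editor's note: the TestRMAppTransitions changes above pin down how the two fields pair up: a RUNNING app reports FinalApplicationStatus.UNDEFINED, a killed app reports KILLED, a failed app reports FAILED, and an app that reaches FINISHED without a proper AM unregister is also treated as FAILED. A hedged client-side sketch of reading both fields together, assuming ApplicationReport exposes getYarnApplicationState() and getFinalApplicationStatus() accessors as this change introduces; the class below is illustrative only:

    import org.apache.hadoop.yarn.api.records.ApplicationReport;
    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;

    public class AppOutcome {
      // YarnApplicationState says whether the app is done; FinalApplicationStatus
      // says how it ended. Both checks are needed, since FINISHED plus FAILED is
      // a legal combination (e.g. the AM exited without unregistering).
      static boolean completedSuccessfully(ApplicationReport report) {
        return report.getYarnApplicationState() == YarnApplicationState.FINISHED
            && report.getFinalApplicationStatus() == FinalApplicationStatus.SUCCEEDED;
      }
    }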
 
 

+ 105 - 0
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java

@@ -34,6 +34,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.EventHandler;
@@ -52,7 +53,9 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppFailedAttemptEve
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppRejectedEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerAllocatedEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptLaunchFailedEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptRegistrationEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptRejectedEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUnregistrationEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
@@ -199,6 +202,7 @@ public class TestRMAppAttemptTransitions {
     assertNull(applicationAttempt.getMasterContainer());
     assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
     assertEquals(0, applicationAttempt.getRanNodes().size());
+    assertNull(applicationAttempt.getFinalApplicationStatus());
   }
 
   /**
@@ -212,6 +216,7 @@ public class TestRMAppAttemptTransitions {
     assertNull(applicationAttempt.getMasterContainer());
     assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
     assertEquals(0, applicationAttempt.getRanNodes().size());
+    assertNull(applicationAttempt.getFinalApplicationStatus());
     
     
     // Check events
     verify(masterService).
@@ -230,6 +235,7 @@ public class TestRMAppAttemptTransitions {
     assertNull(applicationAttempt.getMasterContainer());
     assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
     assertEquals(0, applicationAttempt.getRanNodes().size());
+    assertNull(applicationAttempt.getFinalApplicationStatus());
     
     
     // Check events
     verify(application).handle(any(RMAppRejectedEvent.class));
@@ -247,6 +253,7 @@ public class TestRMAppAttemptTransitions {
     assertEquals(amContainer, applicationAttempt.getMasterContainer());
     assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
     assertEquals(0, applicationAttempt.getRanNodes().size());
+    assertNull(applicationAttempt.getFinalApplicationStatus());
   }
 
   /**
@@ -259,6 +266,7 @@ public class TestRMAppAttemptTransitions {
     assertNull(applicationAttempt.getMasterContainer());
     assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
     assertEquals(0, applicationAttempt.getRanNodes().size());
+    assertNull(applicationAttempt.getFinalApplicationStatus());
     
     
     // Check events
     verify(application).handle(any(RMAppEvent.class));
@@ -299,6 +307,49 @@ public class TestRMAppAttemptTransitions {
     verify(application, times(2)).handle(any(RMAppFailedAttemptEvent.class));
   }
 
 
+  /**
+   * {@link RMAppAttemptState#LAUNCH}
+   */
+  private void testAppAttemptLaunchedState(Container container) {
+    assertEquals(RMAppAttemptState.LAUNCHED, 
+        applicationAttempt.getAppAttemptState());
+    assertEquals(container, applicationAttempt.getMasterContainer());
+    
+    // TODO - need to add more checks relevant to this state
+  }
+
+  /**
+   * {@link RMAppAttemptState#RUNNING}
+   */
+  private void testAppAttemptRunningState(Container container,
+      String host, int rpcPort, String trackingUrl) {
+    assertEquals(RMAppAttemptState.RUNNING, 
+        applicationAttempt.getAppAttemptState());
+    assertEquals(container, applicationAttempt.getMasterContainer());
+    assertEquals(host, applicationAttempt.getHost());
+    assertEquals(rpcPort, applicationAttempt.getRpcPort());
+    assertEquals(trackingUrl, applicationAttempt.getTrackingUrl());
+    
+    // TODO - need to add more checks relevant to this state
+  }
+
+  /**
+   * {@link RMAppAttemptState#FINISHED}
+   */
+  private void testAppAttemptFinishedState(Container container,
+      FinalApplicationStatus finalStatus, 
+      String trackingUrl, 
+      String diagnostics) {
+    assertEquals(RMAppAttemptState.FINISHED, 
+        applicationAttempt.getAppAttemptState());
+    assertEquals(diagnostics, applicationAttempt.getDiagnostics());
+    assertEquals(trackingUrl, applicationAttempt.getTrackingUrl());
+    assertEquals(0,applicationAttempt.getJustFinishedContainers().size());
+    assertEquals(container, applicationAttempt.getMasterContainer());
+    assertEquals(finalStatus, applicationAttempt.getFinalApplicationStatus());
+  }
+  
+  
   private void submitApplicationAttempt() {
     ApplicationAttemptId appAttemptId = applicationAttempt.getAppAttemptId();
     applicationAttempt.handle(
@@ -339,6 +390,27 @@ public class TestRMAppAttemptTransitions {
     
     
     return container;
   }
+  
+  private void launchApplicationAttempt(Container container) {
+    applicationAttempt.handle(
+        new RMAppAttemptEvent(applicationAttempt.getAppAttemptId(), 
+            RMAppAttemptEventType.LAUNCHED));
+
+    testAppAttemptLaunchedState(container);    
+  }
+  
+  private void runApplicationAttempt(Container container,
+      String host, 
+      int rpcPort, 
+      String trackingUrl) {
+    applicationAttempt.handle(
+        new RMAppAttemptRegistrationEvent(
+            applicationAttempt.getAppAttemptId(),
+            host, rpcPort, trackingUrl));
+    
+    testAppAttemptRunningState(container, host, rpcPort, trackingUrl);
+  }
+    
 
 
   @Test
   public void testNewToKilled() {
@@ -400,4 +472,37 @@ public class TestRMAppAttemptTransitions {
     testAppAttemptFailedState(amContainer, diagnostics);
   }
   
   
+  @Test 
+  public void testUnregisterToKilledFinish() {
+    Container amContainer = allocateApplicationAttempt();
+    launchApplicationAttempt(amContainer);
+    runApplicationAttempt(amContainer, "host", 9999, "oldtrackingurl");
+    String trackingUrl = "newtrackingurl";
+    String diagnostics = "Killed by user";
+    FinalApplicationStatus finalStatus = FinalApplicationStatus.KILLED;
+    applicationAttempt.handle(
+        new RMAppAttemptUnregistrationEvent(
+            applicationAttempt.getAppAttemptId(), 
+            trackingUrl, finalStatus, diagnostics));
+    testAppAttemptFinishedState(amContainer, finalStatus,
+        trackingUrl, diagnostics);
+  }
+  
+  
+  @Test 
+  public void testUnregisterToSuccessfulFinish() {
+    Container amContainer = allocateApplicationAttempt();
+    launchApplicationAttempt(amContainer);
+    runApplicationAttempt(amContainer, "host", 9999, "oldtrackingurl");
+    String trackingUrl = "mytrackingurl";
+    String diagnostics = "Successful";
+    FinalApplicationStatus finalStatus = FinalApplicationStatus.SUCCEEDED;
+    applicationAttempt.handle(
+        new RMAppAttemptUnregistrationEvent(
+            applicationAttempt.getAppAttemptId(), 
+            trackingUrl, finalStatus, diagnostics));
+    testAppAttemptFinishedState(amContainer, finalStatus,
+        trackingUrl, diagnostics);
+  }
+  
 }
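Editor's note: the two tests added above drive an attempt through LAUNCHED and RUNNING and then unregister it with KILLED and SUCCEEDED respectively. A sketch of a third, analogous test for a FAILED unregistration, written against the same helpers and the RMAppAttemptUnregistrationEvent constructor shown in this diff; it is an illustration, not part of the commit, and would live inside TestRMAppAttemptTransitions:

    @Test
    public void testUnregisterToFailedFinish() {
      Container amContainer = allocateApplicationAttempt();
      launchApplicationAttempt(amContainer);
      runApplicationAttempt(amContainer, "host", 9999, "oldtrackingurl");
      String trackingUrl = "mytrackingurl";
      String diagnostics = "Job failed";
      FinalApplicationStatus finalStatus = FinalApplicationStatus.FAILED;
      // Same event the AM-driven unregister path delivers, carrying the final
      // status, tracking URL and diagnostics reported by the AM.
      applicationAttempt.handle(
          new RMAppAttemptUnregistrationEvent(
              applicationAttempt.getAppAttemptId(),
              trackingUrl, finalStatus, diagnostics));
      testAppAttemptFinishedState(amContainer, finalStatus, trackingUrl, diagnostics);
    }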

+ 1 - 1
hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestQueueMetrics.java

@@ -25,7 +25,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import static org.apache.hadoop.test.MetricsAsserts.*;
 import static org.apache.hadoop.test.MockitoMaker.*;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.server.resourcemanager.resource.Resource;
 import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;