
Adding attempts page to the history server UI. Contributed by Vinod Kumar Vavilapalli.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/MR-279@1136308 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 14 years ago
parent
commit
22bd26dd1d

+ 2 - 0
mapreduce/CHANGES.txt

@@ -5,6 +5,8 @@ Trunk (unreleased changes)
 
     MAPREDUCE-279
 
+    Adding attempts page to the history server UI. (vinodkv)
+
     Task diagnostic info made available on the AM UI. (vinodkv)
 
     MAPREDUCE-2598. Fix NPE and UI for JobHistory. (Siddharth Seth via llu)

+ 37 - 9
mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java

@@ -188,6 +188,9 @@ public abstract class TaskAttemptImpl implements
      .addTransition(TaskAttemptState.ASSIGNED, TaskAttemptState.RUNNING,
          TaskAttemptEventType.TA_CONTAINER_LAUNCHED,
          new LaunchedContainerTransition())
+     .addTransition(TaskAttemptState.ASSIGNED, TaskAttemptState.ASSIGNED,
+         TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+         DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
      .addTransition(TaskAttemptState.ASSIGNED, TaskAttemptState.FAILED,
          TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED,
          new DeallocateContainerTransition(TaskAttemptState.FAILED, false))
@@ -260,12 +263,16 @@ public abstract class TaskAttemptImpl implements
      .addTransition(TaskAttemptState.SUCCESS_CONTAINER_CLEANUP,
          TaskAttemptState.SUCCEEDED, TaskAttemptEventType.TA_CONTAINER_CLEANED,
          new SucceededTransition())
+     .addTransition(
+          TaskAttemptState.SUCCESS_CONTAINER_CLEANUP,
+          TaskAttemptState.SUCCESS_CONTAINER_CLEANUP,
+          TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+          DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
       // Ignore-able events
      .addTransition(TaskAttemptState.SUCCESS_CONTAINER_CLEANUP,
          TaskAttemptState.SUCCESS_CONTAINER_CLEANUP,
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_FAILMSG,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_TIMED_OUT,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED))
 
@@ -273,13 +280,16 @@ public abstract class TaskAttemptImpl implements
      .addTransition(TaskAttemptState.FAIL_CONTAINER_CLEANUP,
          TaskAttemptState.FAIL_TASK_CLEANUP,
          TaskAttemptEventType.TA_CONTAINER_CLEANED, new TaskCleanupTransition())
+     .addTransition(TaskAttemptState.FAIL_CONTAINER_CLEANUP,
+         TaskAttemptState.FAIL_CONTAINER_CLEANUP,
+         TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+         DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
       // Ignore-able events
      .addTransition(TaskAttemptState.FAIL_CONTAINER_CLEANUP,
          TaskAttemptState.FAIL_CONTAINER_CLEANUP,
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED,
              TaskAttemptEventType.TA_UPDATE,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_COMMIT_PENDING,
              TaskAttemptEventType.TA_CONTAINER_LAUNCHED,
              TaskAttemptEventType.TA_DONE,
@@ -290,6 +300,10 @@ public abstract class TaskAttemptImpl implements
      .addTransition(TaskAttemptState.KILL_CONTAINER_CLEANUP,
          TaskAttemptState.KILL_TASK_CLEANUP,
          TaskAttemptEventType.TA_CONTAINER_CLEANED, new TaskCleanupTransition())
+     .addTransition(TaskAttemptState.KILL_CONTAINER_CLEANUP,
+         TaskAttemptState.KILL_CONTAINER_CLEANUP,
+         TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+         DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
      // Ignore-able events
      .addTransition(
          TaskAttemptState.KILL_CONTAINER_CLEANUP,
@@ -297,7 +311,6 @@ public abstract class TaskAttemptImpl implements
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED,
              TaskAttemptEventType.TA_UPDATE,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_COMMIT_PENDING,
              TaskAttemptEventType.TA_CONTAINER_LAUNCHED,
              TaskAttemptEventType.TA_DONE,
@@ -309,13 +322,16 @@ public abstract class TaskAttemptImpl implements
      .addTransition(TaskAttemptState.FAIL_TASK_CLEANUP,
          TaskAttemptState.FAILED, TaskAttemptEventType.TA_CLEANUP_DONE,
          new FailedTransition())
+     .addTransition(TaskAttemptState.FAIL_TASK_CLEANUP,
+         TaskAttemptState.FAIL_TASK_CLEANUP,
+         TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+         DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
       // Ignore-able events
      .addTransition(TaskAttemptState.FAIL_TASK_CLEANUP,
          TaskAttemptState.FAIL_TASK_CLEANUP,
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED,
              TaskAttemptEventType.TA_UPDATE,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_COMMIT_PENDING,
              TaskAttemptEventType.TA_DONE,
              TaskAttemptEventType.TA_FAILMSG))
@@ -324,49 +340,61 @@ public abstract class TaskAttemptImpl implements
      .addTransition(TaskAttemptState.KILL_TASK_CLEANUP,
          TaskAttemptState.KILLED, TaskAttemptEventType.TA_CLEANUP_DONE,
          new KilledTransition())
+     .addTransition(TaskAttemptState.KILL_TASK_CLEANUP,
+         TaskAttemptState.KILL_TASK_CLEANUP,
+         TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+         DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
      // Ignore-able events
      .addTransition(TaskAttemptState.KILL_TASK_CLEANUP,
          TaskAttemptState.KILL_TASK_CLEANUP,
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED,
              TaskAttemptEventType.TA_UPDATE,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_COMMIT_PENDING,
              TaskAttemptEventType.TA_DONE,
              TaskAttemptEventType.TA_FAILMSG))
 
       // Transitions from SUCCEEDED
-      .addTransition(TaskAttemptState.SUCCEEDED, //only possible for map attempts
+     .addTransition(TaskAttemptState.SUCCEEDED, //only possible for map attempts
          TaskAttemptState.FAILED,
          TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURE,
          new TooManyFetchFailureTransition())
+     .addTransition(
+         TaskAttemptState.SUCCEEDED, TaskAttemptState.SUCCEEDED,
+         TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+         DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
      // Ignore-able events for SUCCEEDED state
      .addTransition(TaskAttemptState.SUCCEEDED,
          TaskAttemptState.SUCCEEDED,
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_FAILMSG,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED))
 
+     // Transitions from FAILED state
+     .addTransition(TaskAttemptState.FAILED, TaskAttemptState.FAILED,
+       TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+       DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
      // Ignore-able events for FAILED state
      .addTransition(TaskAttemptState.FAILED, TaskAttemptState.FAILED,
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_ASSIGNED,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED,
              TaskAttemptEventType.TA_UPDATE,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_CONTAINER_LAUNCHED,
              TaskAttemptEventType.TA_COMMIT_PENDING,
              TaskAttemptEventType.TA_DONE,
              TaskAttemptEventType.TA_FAILMSG))
 
+     // Transitions from KILLED state
+     .addTransition(TaskAttemptState.KILLED, TaskAttemptState.KILLED,
+         TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
+         DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
      // Ignore-able events for KILLED state
      .addTransition(TaskAttemptState.KILLED, TaskAttemptState.KILLED,
          EnumSet.of(TaskAttemptEventType.TA_KILL,
              TaskAttemptEventType.TA_ASSIGNED,
              TaskAttemptEventType.TA_CONTAINER_COMPLETED,
              TaskAttemptEventType.TA_UPDATE,
-             TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE,
              TaskAttemptEventType.TA_CONTAINER_LAUNCHED,
              TaskAttemptEventType.TA_COMMIT_PENDING,
              TaskAttemptEventType.TA_DONE,
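
Note on the hunk above (editor's sketch, not part of the commit): the change removes TA_DIAGNOSTICS_UPDATE from each state's "ignore-able" EnumSet and instead wires it to an explicit self-transition that runs DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION, so diagnostics arriving in cleanup and terminal states are recorded rather than dropped. A minimal, self-contained illustration of that pattern follows; the types are hypothetical stand-ins, not Hadoop's StateMachineFactory.

import java.util.ArrayList;
import java.util.List;

// Hypothetical mini state machine: the DIAGNOSTICS_UPDATE event never changes
// the state, it only appends to the attempt's diagnostics -- the same shape as
// the self-transitions added for TA_DIAGNOSTICS_UPDATE above.
public class SelfTransitionSketch {
  enum State { RUNNING, SUCCEEDED, FAILED }
  enum Event { DONE, FAILMSG, DIAGNOSTICS_UPDATE }

  static class Attempt {
    State state = State.RUNNING;
    final List<String> diagnostics = new ArrayList<>();

    void handle(Event event, String payload) {
      switch (event) {
        case DIAGNOSTICS_UPDATE:
          // Self-transition: record the info, keep the current state.
          diagnostics.add(payload);
          break;
        case DONE:
          state = State.SUCCEEDED;
          break;
        case FAILMSG:
          diagnostics.add(payload);
          state = State.FAILED;
          break;
      }
    }
  }

  public static void main(String[] args) {
    Attempt a = new Attempt();
    a.handle(Event.DONE, null);
    // A late diagnostic arriving after the terminal state is still captured.
    a.handle(Event.DIAGNOSTICS_UPDATE, "container exited with code 137");
    System.out.println(a.state + " " + a.diagnostics);
  }
}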

+ 2 - 2
mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMParams.java

@@ -19,9 +19,9 @@
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
 /**
- * Params constants for the AM webapp
+ * Params constants for the AM webapp and the history webapp.
  */
-interface AMParams {
+public interface AMParams {
   static final String RM_WEB = "rm.web";
   static final String APP_ID = "app.id";
   static final String JOB_ID = "job.id";

+ 3 - 0
mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java

@@ -23,6 +23,8 @@ import static org.apache.hadoop.yarn.util.StringHelper.join;
 import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -35,6 +37,7 @@ import org.apache.hadoop.yarn.webapp.Controller;
 import com.google.inject.Inject;
 
 public class AppController extends Controller implements AMParams {
+  private static final Log LOG = LogFactory.getLog(AppController.class);
   final App app;
   
   protected AppController(App app, Configuration conf, RequestContext ctx,

+ 4 - 5
mapreduce/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobPage.java

@@ -18,13 +18,12 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
-import static org.apache.hadoop.yarn.webapp.Params.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
 
 import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
 
 public class JobPage extends AppView {
 

+ 0 - 9
mapreduce/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HSParams.java

@@ -1,9 +0,0 @@
-package org.apache.hadoop.mapreduce.v2.hs.webapp;
-
-interface HSParams {
-  static final String HS_WEB = "hs.web";
-  static final String APP_ID = "app.id";
-  static final String JOB_ID = "job.id";
-  static final String TASK_ID = "task.id";
-  static final String TASK_TYPE = "task.type";
-}

+ 25 - 3
mapreduce/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HSWebApp.java

@@ -1,11 +1,31 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
+import static org.apache.hadoop.yarn.util.StringHelper.pajoin;
+
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.hs.HistoryClientService;
+import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.yarn.webapp.WebApp;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
-public class HSWebApp extends WebApp implements HSParams {
+
+public class HSWebApp extends WebApp implements AMParams {
 
   private HistoryContext history;
 
@@ -21,6 +41,8 @@ public class HSWebApp extends WebApp implements HSParams {
     route(pajoin("/job", JOB_ID), HsController.class, "job");
     route(pajoin("/jobcounters", JOB_ID), HsController.class, "jobCounters");
     route(pajoin("/tasks", JOB_ID, TASK_TYPE), HsController.class, "tasks");
+    route(pajoin("/attempts", JOB_ID, TASK_TYPE, ATTEMPT_STATE),
+        HsController.class, "attempts");
     route(pajoin("/task", TASK_ID), HsController.class, "task");
   }
 }
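
For context on the route added above (editor's sketch, not part of the commit): StringHelper.pajoin builds a parameterized path from the JOB_ID, TASK_TYPE, and ATTEMPT_STATE keys, and the resulting pattern dispatches to HsController#attempts(). The snippet below only prints whatever pattern pajoin produces; the literal key values are assumptions mirroring the existing "job.id"/"task.type" convention, and "attempt.state" in particular is a guess, not taken from AMParams.

import static org.apache.hadoop.yarn.util.StringHelper.pajoin;

// Prints the parameterized route pattern that HSWebApp registers for the new
// attempts page. Key literals below are assumed for illustration only.
public class AttemptsRoutePattern {
  static final String JOB_ID = "job.id";               // matches AMParams above
  static final String TASK_TYPE = "task.type";         // matches the removed HSParams
  static final String ATTEMPT_STATE = "attempt.state"; // assumed value

  public static void main(String[] args) {
    System.out.println(pajoin("/attempts", JOB_ID, TASK_TYPE, ATTEMPT_STATE));
  }
}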

+ 26 - 17
mapreduce/mr-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java

@@ -1,27 +1,30 @@
-package org.apache.hadoop.mapreduce.v2.hs.webapp;
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
-import java.util.Locale;
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
-import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
 import org.apache.hadoop.mapreduce.v2.app.webapp.AppController;
-import org.apache.hadoop.mapreduce.v2.app.webapp.CountersPage;
-import org.apache.hadoop.mapreduce.v2.app.webapp.JobPage;
-import org.apache.hadoop.mapreduce.v2.app.webapp.TasksPage;
-import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.yarn.webapp.Controller;
-import org.apache.hadoop.mapreduce.v2.util.MRApps;
-
-import static org.apache.hadoop.yarn.util.StringHelper.join;
+
 import com.google.inject.Inject;
 
-public class HsController extends AppController implements HSParams {
+public class HsController extends AppController {
   
   @Inject HsController(App app, Configuration conf, RequestContext ctx) {
     super(app, conf, ctx, "History");
@@ -33,6 +36,8 @@ public class HsController extends AppController implements HSParams {
     setTitle("JobHistory");
   }
 
+  // Need all of these methods here also as Guice doesn't look into parent
+  // classes.
   public void job() {
     super.job();
   }
@@ -49,4 +54,8 @@ public class HsController extends AppController implements HSParams {
     super.task();
   }
 
+  @Override
+  public void attempts() {
+    super.attempts();
+  }
 }
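
On the "Guice doesn't look into parent classes" comment above: the controller re-declares job(), task(), attempts(), and the other actions so the action lookup finds them on the concrete class. A small stand-alone illustration of the underlying Java pitfall follows; it assumes an action dispatcher that resolves methods with getDeclaredMethod, which is a hypothetical lookup strategy and may not match Hadoop's actual dispatch code.

import java.lang.reflect.Method;

// Demonstrates why inherited action methods can be invisible to a dispatcher
// that only inspects the concrete class (hypothetical lookup strategy).
public class DeclaredMethodLookup {
  static class BaseController { public void attempts() {} }
  static class HistoryController extends BaseController { /* no override */ }

  public static void main(String[] args) throws Exception {
    // Inherited public methods are visible to getMethod...
    Method m = HistoryController.class.getMethod("attempts");
    System.out.println("getMethod found: " + m.getName());
    // ...but not to getDeclaredMethod, which only sees methods declared on the
    // class itself -- the kind of lookup that forces the overrides above.
    try {
      HistoryController.class.getDeclaredMethod("attempts");
    } catch (NoSuchMethodException e) {
      System.out.println("getDeclaredMethod: not declared on HistoryController");
    }
  }
}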