
Merge r1446505 through r1448504 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-2802@1448505 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 12 years ago
commit 89d05b53d9

+ 19 - 1
dev-support/test-patch.sh

@@ -323,7 +323,7 @@ checkAuthor () {
 }
 
 ###############################################################################
-### Check for tests in the patch
+### Check for tests and their timeout in the patch
 checkTests () {
   echo ""
   echo ""
@@ -357,6 +357,24 @@ checkTests () {
   JIRA_COMMENT="$JIRA_COMMENT
 
     {color:green}+1 tests included{color}.  The patch appears to include $testReferences new or modified test files."
+  echo ""
+  echo "======================================================================"
+  echo "======================================================================"
+  echo "    Checking if the tests have timeout assigned in this patch."
+  echo "======================================================================"
+  echo "======================================================================"
+  
+  nontimeoutTests=`cat $PATCH_DIR/patch | $AWK '{ printf "%s ", $0 }'  | $GREP --extended-regex --count '[ ]*\+[ ]*((@Test[\+ ]*[A-Za-z]+)|([\+ ]*@Test[ \+]*\([ \+]*\)[\ ]*\+?[ ]*[A-Za-z]+)|([\+ ]*@Test[\+ ]*\(exception[ \+]*=[ \+]*[A-Z.a-z0-9A-Z ]*\)))'`
+
+  if [[ $nontimeoutTests == 0 ]] ; then
+    JIRA_COMMENT="$JIRA_COMMENT
+
+    {color:green}+1 tests included appear to have a timeout.{color}"
+    return 1
+  fi
+  JIRA_COMMENT="$JIRA_COMMENT
+
+  {color:red}-1 one of the tests included doesn't have a timeout.{color}"
   return 0
 }
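For reference, the regex added above flags JUnit test methods introduced by a patch whose @Test annotation carries no timeout attribute. A minimal sketch of the two shapes the check distinguishes, using standard JUnit 4 annotations and a hypothetical test class:

import org.junit.Test;

public class TimeoutCheckExampleTest {

  // Flagged by the check above: @Test with no timeout attribute.
  @Test
  public void testWithoutTimeout() throws Exception {
  }

  // Passes the check: JUnit 4 fails the test if it runs longer than
  // the given number of milliseconds.
  @Test(timeout = 30000)
  public void testWithTimeout() throws Exception {
  }
}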

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -150,6 +150,9 @@ Trunk (Unreleased)
 
     HADOOP-9218 Document the Rpc-wrappers used internally (sanjay Radia)
 
+    HADOOP-9112. test-patch should -1 for @Tests without a timeout 
+    (Surenkumar Nihalani via bobby)
+
   BUG FIXES
 
     HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -299,6 +299,9 @@ Trunk (Unreleased)
     HDFS-4340. Update addBlock() to include inode id as additional argument.
     (Brandon Li via suresh)
 
+    HDFS-4502. JsonUtil.toFileStatus(..) should check if the fileId property
+    exists.  (Brandon Li via suresh)
+
 Release 2.0.4-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 3 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java

@@ -42,6 +42,7 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -244,7 +245,8 @@ public class JsonUtil {
     final long mTime = (Long) m.get("modificationTime");
     final long blockSize = (Long) m.get("blockSize");
     final short replication = (short) (long) (Long) m.get("replication");
-    final long fileId = (Long) m.get("fileId");
+    final long fileId = m.containsKey("fileId") ? (Long) m.get("fileId")
+        : INodeId.GRANDFATHER_INODE_ID;
     return new HdfsFileStatus(len, type == PathType.DIRECTORY, replication,
         blockSize, mTime, aTime, permission, owner, group,
         symlink, DFSUtil.string2Bytes(localName), fileId);
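The guard above (HDFS-4502) keeps WebHDFS clients compatible with older namenodes whose JSON response omits the fileId property: instead of auto-unboxing the null that m.get("fileId") would return (an immediate NullPointerException), the parser substitutes the INodeId.GRANDFATHER_INODE_ID sentinel. A minimal, self-contained sketch of the same optional-field pattern; the helper and class names here are illustrative, not part of JsonUtil:

import java.util.HashMap;
import java.util.Map;

public class OptionalFieldExample {
  // Read an optional numeric property, falling back to a sentinel
  // when an older server's response omits the key.
  static long optionalLong(Map<?, ?> m, String key, long fallback) {
    return m.containsKey(key) ? (Long) m.get(key) : fallback;
  }

  public static void main(String[] args) {
    Map<String, Object> json = new HashMap<String, Object>();
    json.put("blockSize", 134217728L);
    // No "fileId" entry, as an older namenode would respond.
    System.out.println(optionalLong(json, "fileId", -1L)); // prints -1
  }
}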

+ 7 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -152,6 +152,9 @@ Trunk (Unreleased)
     MAPREDUCE-4884. Streaming tests fail to start MiniMRCluster due to missing
     queue configuration. (Chris Nauroth via suresh)
 
+    MAPREDUCE-5012. Typo in javadoc for IdentityMapper class. (Adam Monsen
+    via suresh)
+
 Release 2.0.4-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -720,6 +723,10 @@ Release 0.23.7 - UNRELEASED
     MAPREDUCE-4992. AM hangs in RecoveryService when recovering tasks with
     speculative attempts (Robert Parker via jlowe)
 
+    MAPREDUCE-5009. Killing the Task Attempt slated for commit does not clear
+    the value from the Task commitAttempt member (Robert Parker via jeagles)
+
+
 Release 0.23.6 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 3 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java

@@ -857,6 +857,9 @@ public abstract class TaskImpl implements Task, EventHandler<TaskEvent> {
       if (task.successfulAttempt == null) {
         task.addAndScheduleAttempt(Avataar.VIRGIN);
       }
+      if ((task.commitAttempt != null) && (task.commitAttempt == taskAttemptId)) {
+        task.commitAttempt = null;
+      }
     }
   }
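The regression test in the next hunk, testKillDuringTaskAttemptCommit, exercises exactly this path: it drives a REDUCE task attempt to COMMIT_PENDING, kills it, and asserts that canCommit(..) no longer returns true for the killed attempt.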

+ 18 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java

@@ -491,7 +491,25 @@ public class TestTaskImpl {
     assert(mockTask.getProgress() == progress);
         
   }
+
   
+  @Test
+  public void testKillDuringTaskAttemptCommit() {
+    mockTask = createMockTask(TaskType.REDUCE);        
+    TaskId taskId = getNewTaskID();
+    scheduleTaskAttempt(taskId);
+    
+    launchTaskAttempt(getLastAttempt().getAttemptId());
+    updateLastAttemptState(TaskAttemptState.COMMIT_PENDING);
+    commitTaskAttempt(getLastAttempt().getAttemptId());
+
+    TaskAttemptId commitAttempt = getLastAttempt().getAttemptId();
+    updateLastAttemptState(TaskAttemptState.KILLED);
+    killRunningTaskAttempt(commitAttempt);
+
+    assertFalse(mockTask.canCommit(commitAttempt));
+  }
+
   @Test
   public void testFailureDuringTaskAttemptCommit() {
     mockTask = createMockTask(TaskType.MAP);        

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/IdentityMapper.java

@@ -35,7 +35,7 @@ import org.apache.hadoop.mapred.MapReduceBase;
 public class IdentityMapper<K, V>
     extends MapReduceBase implements Mapper<K, V, K, V> {
 
-  /** The identify function.  Input key/value pair is written directly to
+  /** The identity function.  Input key/value pair is written directly to
    * output.*/
   public void map(K key, V val,
                   OutputCollector<K, V> output, Reporter reporter)
 
+ 6 - 0
hadoop-yarn-project/CHANGES.txt

@@ -316,6 +316,9 @@ Release 0.23.7 - UNRELEASED
     YARN-249. Capacity Scheduler web page should show list of active users per 
     queue like it used to (in 1.x) (Ravi Prakash via tgraves)
 
+    YARN-236. RM should point tracking URL to RM web page when app fails to
+    start (Jason Lowe via jeagles)
+
   OPTIMIZATIONS
 
     YARN-357. App submission should not be synchronized (daryn)
@@ -331,6 +334,9 @@ Release 0.23.7 - UNRELEASED
     YARN-362. Unexpected extra results when using webUI table search (Ravi
     Prakash via jlowe)
 
+    YARN-400. RM can return null application resource usage report leading to 
+    NPE in client (Jason Lowe via tgraves)
+
 Release 0.23.6 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 7 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java

@@ -406,7 +406,8 @@ public class RMAppImpl implements RMApp, Recoverable {
       String host = UNAVAILABLE;
       String origTrackingUrl = UNAVAILABLE;
       int rpcPort = -1;
-      ApplicationResourceUsageReport appUsageReport = null;
+      ApplicationResourceUsageReport appUsageReport =
+          DUMMY_APPLICATION_RESOURCE_USAGE_REPORT;
       FinalApplicationStatus finishState = getFinalApplicationStatus();
       String diags = UNAVAILABLE;
       if (allowAccess) {
@@ -418,18 +419,17 @@ public class RMAppImpl implements RMApp, Recoverable {
           host = this.currentAttempt.getHost();
           rpcPort = this.currentAttempt.getRpcPort();
           appUsageReport = currentAttempt.getApplicationResourceUsageReport();
-        } else {
-          currentApplicationAttemptId = 
-              BuilderUtils.newApplicationAttemptId(this.applicationId, 
-                  DUMMY_APPLICATION_ATTEMPT_NUMBER);
         }
+
         diags = this.diagnostics.toString();
-      } else {
-        appUsageReport = DUMMY_APPLICATION_RESOURCE_USAGE_REPORT;
+      }
+
+      if (currentApplicationAttemptId == null) {
         currentApplicationAttemptId = 
             BuilderUtils.newApplicationAttemptId(this.applicationId, 
                 DUMMY_APPLICATION_ATTEMPT_NUMBER);
       }
+
       return BuilderUtils.newApplicationReport(this.applicationId,
           currentApplicationAttemptId, this.user, this.queue,
           this.name, host, rpcPort, clientToken,
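The reshuffling above (YARN-400) guarantees a non-null usage report on every path: appUsageReport now starts out as DUMMY_APPLICATION_RESOURCE_USAGE_REPORT and is only overwritten when a live attempt can supply real numbers, while the dummy attempt id is filled in by a single null check instead of two duplicated else branches. A minimal sketch of this null-object idea, with hypothetical types standing in for the YARN records:

// Hypothetical types; the real code uses ApplicationResourceUsageReport.
class UsageReport {
  final long usedMemMB;
  UsageReport(long usedMemMB) { this.usedMemMB = usedMemMB; }
}

class Reports {
  // Shared placeholder returned instead of null, so callers can
  // invoke getters without risking an NPE.
  static final UsageReport DUMMY = new UsageReport(-1);

  static UsageReport currentReport(UsageReport fromAttempt) {
    UsageReport report = DUMMY;   // safe default first
    if (fromAttempt != null) {
      report = fromAttempt;       // overwrite with real data
    }
    return report;
  }
}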

+ 9 - 0
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java

@@ -29,6 +29,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.MockApps;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl;
@@ -616,4 +617,12 @@ public class TestRMAppTransitions {
     assertTimesAtFinish(application);
     assertAppState(RMAppState.KILLED, application);
   }
+
+  @Test
+  public void testGetAppReport() {
+    RMApp app = createNewTestApp(null);
+    assertAppState(RMAppState.NEW, app);
+    ApplicationReport report = app.createAndGetApplicationReport(true);
+    Assert.assertNotNull(report.getApplicationResourceUsageReport());
+  }
 }

+ 6 - 18
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java

@@ -66,6 +66,7 @@ public class WebAppProxyServlet extends HttpServlet {
   public static final String PROXY_USER_COOKIE_NAME = "proxy-user";
 
   private final List<TrackingUriPlugin> trackingUriPlugins;
+  private final String rmAppPageUrlBase;
 
   private static class _ implements Hamlet._ {
     //Empty
@@ -91,6 +92,8 @@ public class WebAppProxyServlet extends HttpServlet {
     this.trackingUriPlugins =
         conf.getInstances(YarnConfiguration.YARN_TRACKING_URL_GENERATOR,
             TrackingUriPlugin.class);
+    this.rmAppPageUrlBase = StringHelper.pjoin(
+        YarnConfiguration.getRMWebAppURL(conf), "cluster", "app");
   }
 
   /**
@@ -291,25 +294,10 @@ public class WebAppProxyServlet extends HttpServlet {
       if (original != null) {
         trackingUri = ProxyUriUtils.getUriFromAMUrl(original);
       }
+      // fallback to ResourceManager's app page if no tracking URI provided
       if(original == null || original.equals("N/A")) {
-        String message;
-        switch(applicationReport.getFinalApplicationStatus()) {
-          case FAILED:
-          case KILLED:
-          case SUCCEEDED:
-            message =
-              "The requested application exited before setting a tracking URL.";
-            break;
-          case UNDEFINED:
-            message = "The requested application does not appear to be running "
-              +"yet, and has not set a tracking URL.";
-            break;
-          default:
-            //This should never happen, but just to be safe
-            message = "The requested application has not set a tracking URL.";
-            break;
-        }
-        notFound(resp, message);
+        resp.sendRedirect(resp.encodeRedirectURL(
+            StringHelper.pjoin(rmAppPageUrlBase, id.toString())));
         return;
       }
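The replacement above (YARN-236) drops the per-state "no tracking URL" error pages: whenever an application has not set a tracking URL, the proxy now redirects to the ResourceManager's own page for that application, which can show current state for any app. StringHelper.pjoin joins its arguments with "/", so with the default RM web address the redirect for a hypothetical application id would look like http://<rm-host>:8088/cluster/app/application_1361234567890_0001.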