
Merge -r 1166847:1166848 from trunk to branch. Fixes: HADOOP-7612

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1166852 13f79535-47bb-0310-9956-ffa450edef68
Thomas White, 13 years ago
Parent commit: efb369ebb8
2 changed files with 27 additions and 52 deletions
  1. dev-support/test-patch.sh (+24, -52)
  2. hadoop-common-project/hadoop-common/CHANGES.txt (+3, -0)
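
For context, a hedged sketch of the SVN commands that produce a merge commit like the one recorded above; the trunk URL is inferred from the git-svn-id line, and the working copy is assumed to be a checkout of branch-0.23:

    # Sketch only: pull the single trunk revision into the branch working copy.
    svn merge -r 1166847:1166848 https://svn.apache.org/repos/asf/hadoop/common/trunk .
    svn commit -m "Merge -r 1166847:1166848 from trunk to branch. Fixes: HADOOP-7612"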

+ 24 - 52
dev-support/test-patch.sh

@@ -64,6 +64,7 @@ printUsage() {
   echo "--findbugs-home=<path> Findbugs home directory (default FINDBUGS_HOME environment variable)"
   echo "--forrest-home=<path>  Forrest home directory (default FORREST_HOME environment variable)"
   echo "--dirty-workspace      Allow the local SVN workspace to have uncommitted changes"
+  echo "--run-tests            Run all tests below the base directory"
   echo
   echo "Jenkins-only options:"
   echo "--jenkins              Run by Jenkins (runs tests and posts results to JIRA)"
@@ -130,6 +131,9 @@ parseArgs() {
     --dirty-workspace)
       DIRTY_WORKSPACE=true
       ;;
+    --run-tests)
+      RUN_TESTS=true
+      ;;
     *)
       PATCH_OR_DEFECT=$i
       ;;
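
Together with the gate added in the last hunk of this file, the flag flows from the command line into a single boolean. A minimal sketch of that flow, assuming the positional, case-based parsing style used by parseArgs():

    # Each argument is matched against known flags; anything else is
    # treated as the patch file or defect number.
    for i in "$@" ; do
      case $i in
        --run-tests)
          RUN_TESTS=true
          ;;
        *)
          PATCH_OR_DEFECT=$i
          ;;
      esac
    done
    # Near the end of the script, tests run under Jenkins or on explicit request:
    if [[ $JENKINS == "true" || $RUN_TESTS == "true" ]] ; then
      runTests
    fi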
@@ -249,6 +253,18 @@ setup () {
   echo "======================================================================"
   echo ""
   echo ""
+  if [[ ! -d hadoop-common-project ]]; then
+    cd $bindir/..
+    echo "Compiling $(pwd)"
+    echo "$MVN clean test -DskipTests > $PATCH_DIR/trunkCompile.txt 2>&1"
+    $MVN clean test -DskipTests > $PATCH_DIR/trunkCompile.txt 2>&1
+    if [[ $? != 0 ]] ; then
+      echo "Top-level trunk compilation is broken?"
+      cleanupAndExit 1
+    fi
+    cd -
+  fi
+  echo "Compiling $(pwd)"
   echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
   $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
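
This new step compiles the whole trunk first whenever the script is started from a subproject; the presence of hadoop-common-project is used as a marker for the repository root. A hedged sketch of the equivalent manual steps (paths are illustrative):

    # From a subproject such as hadoop-common-project/hadoop-common:
    cd ../..                         # repository root, where hadoop-common-project exists
    mvn clean test -DskipTests       # compile the whole tree once, skipping tests
    cd -                             # return to the subproject and continue with test-patch.sh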
@@ -580,26 +596,12 @@ runTests () {
   echo ""
   echo ""
   
-  failed_tests=""
-  modules=$(findModules)
-  for module in $modules;
-  do
-    pushd $module
-      echo "    Running tests in $module"
-      ### Kill any rogue build processes from the last attempt
-      $PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
-
-      echo "$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess"
-      $MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess
-      if [[ $? != 0 ]] ; then
-        ### Find and format names of failed tests
-        module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
-        failed_tests="${failed_tests}
-${module_failed_tests}"
-      fi
-    popd
-  done
-  echo $failed_tests
+  echo "$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess"
+  $MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess
+  if [[ $? != 0 ]] ; then
+    ### Find and format names of failed tests
+    failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
+  fi
   
   if [[ -n "$failed_tests" ]] ; then
   
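
With the per-module loop gone, tests now run once from the base directory and all surefire reports are scanned in a single pass. A hedged walk-through of the extraction pipeline, using an illustrative report path:

    # Given a failing report such as:
    #   ./hadoop-common/target/surefire-reports/TEST-org.apache.hadoop.fs.TestPath.xml
    find . -name 'TEST*.xml' \
      | xargs grep -l -E "<failure|<error" \
      | sed -e "s|.*target/surefire-reports/TEST-||g" -e "s|\.xml||g"
    # find    : list every surefire report below the current directory
    # grep -l : keep only reports containing a <failure or <error element
    # sed     : trim the path prefix and the .xml suffix, leaving the test class name
    #           (the script replaces the prefix with spaces instead, to indent its output)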
@@ -615,36 +617,6 @@ $failed_tests"
   return 0
 }
 
-###############################################################################
-### Find the modules changed by the patch
-
-findModules () {
-  # Come up with a list of changed files into $TMP
-  TMP=/tmp/tmp.paths.$$
-  $GREP '^+++\|^---' $PATCH_DIR/patch | cut -c '5-' | $GREP -v /dev/null | sort | uniq > $TMP
-
-  # if all of the lines start with a/ or b/, then this is a git patch that
-  # was generated without --no-prefix
-  if ! $GREP -qv '^a/\|^b/' $TMP ; then
-    sed -i -e 's,^[ab]/,,' $TMP
-  fi
-
-  PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
-
-  # if all of the lines start with hadoop-common-project/, hadoop-hdfs-project/, or hadoop-mapreduce-project/, this is
-  # relative to the hadoop root instead of the subproject root
-  if [[ "$PREFIX_DIRS" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-mapreduce-project)$ ]]; then
-    echo $PREFIX_DIRS
-    return 0
-  elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-mapreduce-project' ; then
-    echo $PREFIX_DIRS
-    return 0
-  fi
-  
-  # No modules found. Running from current directory.
-  echo .
-}
-
 ###############################################################################
 ### Run the test-contrib target
 runContribTests () {
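
For reference, the deleted findModules() guessed which Maven modules a patch touched by inspecting the path prefixes in the diff; building from the base directory makes that detection unnecessary. A rough, hypothetical illustration of the heuristic (the patch contents are made up):

    # Given a git-style patch containing lines such as:
    #   --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
    #   +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
    grep '^+++\|^---' "$PATCH_DIR/patch" \
      | cut -c '5-' | grep -v /dev/null | sort -u \
      | sed -e 's,^[ab]/,,' \
      | cut -d '/' -f 1 | sort -u
    # -> hadoop-common-project    (the directory the old per-module loop ran tests in)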
@@ -820,8 +792,8 @@ checkFindbugsWarnings
 (( RESULT = RESULT + $? ))
 checkReleaseAuditWarnings
 (( RESULT = RESULT + $? ))
-### Do not call these when run by a developer 
-if [[ $JENKINS == "true" ]] ; then
+### Run tests for Jenkins or if explicitly asked for by a developer
+if [[ $JENKINS == "true" || $RUN_TESTS == "true" ]] ; then
   runTests
   (( RESULT = RESULT + $? ))
   runContribTests

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -364,6 +364,9 @@ Release 0.23.0 - Unreleased
     HADOOP-7507. Allow ganglia metrics to include the metrics system tags
                  in the gmetric names. (Alejandro Abdelnur via todd)
 
+    HADOOP-7612. Change test-patch to run tests for all nested modules.
+    (tomwhite)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole