
HADOOP-12493. bash unit tests are failing (aw)

Allen Wittenauer 9 years ago
parent
commit
ded012e1d6

+ 6 - 1
hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh

@@ -512,7 +512,10 @@ function hadoop_basic_init
     exit 1
   fi
 
+  # if for some reason the shell doesn't have $USER defined
+  # let's define it as 'hadoop'
   HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
+  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-hadoop}
   HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
   HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
   HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
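
A note on the hunk above: the second assignment only kicks in when the first one leaves HADOOP_IDENT_STRING empty, which happens when $USER itself is unset or empty. A minimal standalone sketch of the chained ${VAR:-default} fallback (hypothetical demo, not part of hadoop-functions.sh):

  unset USER HADOOP_IDENT_STRING
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}   # $USER is unset, so the value stays empty
  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-hadoop}  # :- also fires on empty, so this supplies 'hadoop'
  echo "${HADOOP_IDENT_STRING}"                       # prints: hadoop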
@@ -963,7 +966,9 @@ function hadoop_add_to_classpath_userpath
       array[${c}]=${idx}
       ((c=c+1))
     done
-    ((j=c-1))
+
+    # bats gets confused by j getting set to 0
+    ((j=c-1)) || ${QATESTMODE}
 
     if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
       if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then

+ 4 - 4
hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats

@@ -72,7 +72,7 @@ createdirs () {
    HADOOP_USER_CLASSPATH_FIRST=""
    hadoop_add_to_classpath_userpath
    echo ">${CLASSPATH}<"
-   [ ${CLASSPATH} = "${TMP}/foo" ]
+   [ "${CLASSPATH}" = "${TMP}/foo" ]
 }
 
 @test "hadoop_add_to_classpath_userpath (3+2 after)" {
@@ -83,7 +83,7 @@ createdirs () {
    HADOOP_USER_CLASSPATH_FIRST=""
    hadoop_add_to_classpath_userpath
    echo ">${CLASSPATH}<"
-   [ ${CLASSPATH} = "${TMP}/foo:${TMP}/bar:${TMP}/baz:${TMP}/new:${TMP}/old" ]
+   [ "${CLASSPATH}" = "${TMP}/foo:${TMP}/bar:${TMP}/baz:${TMP}/new:${TMP}/old" ]
 }
 
 @test "hadoop_add_to_classpath_userpath (3+2 before)" {
@@ -94,5 +94,5 @@ createdirs () {
    HADOOP_USER_CLASSPATH_FIRST="true"
    hadoop_add_to_classpath_userpath
    echo ">${CLASSPATH}<"
-   [ ${CLASSPATH} = "${TMP}/new:${TMP}/old:${TMP}/foo:${TMP}/bar:${TMP}/baz" ]
-}
+   [ "${CLASSPATH}" = "${TMP}/new:${TMP}/old:${TMP}/foo:${TMP}/bar:${TMP}/baz" ]
+}
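
The quoting changes in these assertions matter because [ word-splits unquoted expansions: if CLASSPATH were empty, or ${TMP} contained whitespace, the old form would hand [ the wrong number of arguments instead of simply evaluating to false. A small sketch with hypothetical values:

  CLASSPATH=""
  [ ${CLASSPATH} = "/tmp/foo" ]     # expands to: [ = /tmp/foo ]    -> "unary operator expected" error
  [ "${CLASSPATH}" = "/tmp/foo" ]   # expands to: [ "" = /tmp/foo ] -> cleanly evaluates to false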

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats

@@ -69,7 +69,7 @@ check_var_values () {
     eval ${j}=${i}
     hadoop_basic_init
     echo "Verifying $j has >${i}< >${!j}<"
-    [ ${!j} = ${i} ]
+    [ "${!j}" = "${i}" ]
   done
 }
 
@@ -89,6 +89,6 @@ check_var_values () {
     hadoop_basic_init
     check_var_values
     echo "Verifying $j has foo >${!j}<"
-    [ ${j} = foo ]
+    [ "${j}" = "foo" ]
   done
 }
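
Same quoting fix here, with the added wrinkle that ${!j} is an indirect expansion: j holds the name of the variable under test, and the assertion compares that variable's value against ${i}. A minimal sketch with hypothetical names and values:

  HADOOP_LOG_DIR="/tmp/logs"
  j=HADOOP_LOG_DIR
  i="/tmp/logs"
  [ "${!j}" = "${i}" ]   # ${!j} reads the value of HADOOP_LOG_DIR, so this succeeds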

+ 4 - 4
hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats

@@ -32,20 +32,20 @@ load hadoop-functions_test_helper
 
 @test "hadoop_common_slave_mode_execute (--slaves 1)" {
   run  hadoop_common_slave_mode_execute --slaves command
-  [ ${output} = command ]
+  [ "${output}" = "command" ]
 }
 
 @test "hadoop_common_slave_mode_execute (--slaves 2)" {
   run  hadoop_common_slave_mode_execute --slaves command1 command2
-  [ ${output} = "command1 command2" ]
+  [ "${output}" = "command1 command2" ]
 }
 
 @test "hadoop_common_slave_mode_execute (--hosts)" {
   run  hadoop_common_slave_mode_execute --hosts filename command
-  [ ${output} = command ]
+  [ "${output}" = "command" ]
 }
 
 @test "hadoop_common_slave_mode_execute (--hostnames 2)" {
   run  hadoop_common_slave_mode_execute --hostnames "host1,host2" command1 command2
-  [ ${output} = "command1 command2" ]
+  [ "${output}" = "command1 command2" ]
 }