#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Note: This script depends on 5 environment variables to function correctly:
# a) HADOOP_HOME - must be set
# b) HDFS_TEST_CONF_DIR - optional; the directory to read and write
#    core-site.xml to. Defaults to /tmp
# c) LIBHDFS_BUILD_DIR - optional; the location of the hdfs_test
#    executable. Defaults to the parent directory.
# d) OS_NAME - used to choose how to locate libjvm.so
# e) CLOVER_JAR - optional; the location of the Clover code coverage tool's jar.
#
  28. if [ "x$HADOOP_HOME" == "x" ]; then
  29. echo "HADOOP_HOME is unset!"
  30. exit 1
  31. fi
  32. if [ "x$LIBHDFS_BUILD_DIR" == "x" ]; then
  33. LIBHDFS_BUILD_DIR=`pwd`/../
  34. fi
  35. if [ "x$HDFS_TEST_CONF_DIR" == "x" ]; then
  36. HDFS_TEST_CONF_DIR=/tmp
  37. fi
  38. # LIBHDFS_INSTALL_DIR is the directory containing libhdfs.so
  39. LIBHDFS_INSTALL_DIR=$HADOOP_HOME/lib/native/
  40. HDFS_TEST=hdfs_test
  41. HDFS_TEST_JAR=`find $HADOOP_HOME/share/hadoop/hdfs/ \
  42. -name "hadoop-hdfs-*-tests.jar" | head -n 1`
  43. if [ "x$HDFS_TEST_JAR" == "x" ]; then
  44. echo "HDFS test jar not found! Tried looking in all subdirectories \
  45. of $HADOOP_HOME/share/hadoop/hdfs/"
  46. exit 1
  47. fi
  48. echo "Found HDFS test jar at $HDFS_TEST_JAR"
  49. # CLASSPATH initially contains $HDFS_TEST_CONF_DIR
  50. CLASSPATH="${HDFS_TEST_CONF_DIR}"
  51. CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
  52. # add Clover jar file needed for code coverage runs
  53. CLASSPATH=${CLASSPATH}:${CLOVER_JAR};
  54. # so that filenames w/ spaces are handled correctly in loops below
  55. IFS=$'\n'
  56. JAR_DIRS="$HADOOP_HOME/share/hadoop/common/lib/
  57. $HADOOP_HOME/share/hadoop/common/
  58. $HADOOP_HOME/share/hadoop/hdfs
  59. $HADOOP_HOME/share/hadoop/hdfs/lib/"
  60. for d in $JAR_DIRS; do
  61. for j in $d/*.jar; do
  62. CLASSPATH=${CLASSPATH}:$j
  63. done;
  64. done;
  65. # restore ordinary behaviour
  66. unset IFS
  67. findlibjvm () {
  68. javabasedir=$JAVA_HOME
  69. case $OS_NAME in
  70. cygwin* | mingw* | pw23* )
  71. lib_jvm_dir=`find $javabasedir -follow \( \
  72. \( -name client -type d -prune \) -o \
  73. \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  74. ;;
  75. aix*)
  76. lib_jvm_dir=`find $javabasedir \( \
  77. \( -name client -type d -prune \) -o \
  78. \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  79. if test -z "$lib_jvm_dir"; then
  80. lib_jvm_dir=`find $javabasedir \( \
  81. \( -name client -type d -prune \) -o \
  82. \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  83. fi
  84. ;;
  85. *)
  86. lib_jvm_dir=`find $javabasedir -follow \( \
  87. \( -name client -type d -prune \) -o \
  88. \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  89. if test -z "$lib_jvm_dir"; then
  90. lib_jvm_dir=`find $javabasedir -follow \( \
  91. \( -name client -type d -prune \) -o \
  92. \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  93. fi
  94. ;;
  95. esac
  96. echo $lib_jvm_dir
  97. }
  98. LIB_JVM_DIR=`findlibjvm`
  99. echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
  100. echo LIB_JVM_DIR = $LIB_JVM_DIR
  101. echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
  102. # Put delays to ensure hdfs is up and running and also shuts down
  103. # after the tests are complete
  104. rm $HDFS_TEST_CONF_DIR/core-site.xml
  105. $HADOOP_HOME/bin/hadoop jar $HDFS_TEST_JAR \
  106. org.apache.hadoop.test.MiniDFSClusterManager \
  107. -format -nnport 20300 -writeConfig $HDFS_TEST_CONF_DIR/core-site.xml \
  108. > /tmp/libhdfs-test-cluster.out 2>&1 &
  109. MINI_CLUSTER_PID=$!
  110. for i in {1..15}; do
  111. echo "Waiting for DFS cluster, attempt $i of 15"
  112. [ -f $HDFS_TEST_CONF_DIR/core-site.xml ] && break;
  113. sleep 2
  114. done
  115. if [ ! -f $HDFS_TEST_CONF_DIR/core-site.xml ]; then
  116. echo "Cluster did not come up in 30s"
  117. kill -9 $MINI_CLUSTER_PID
  118. exit 1
  119. fi
  120. echo "Cluster up, running tests"
  121. # Disable error checking to make sure we get to cluster cleanup
  122. set +e
  123. CLASSPATH=$CLASSPATH \
  124. LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so:" \
  125. $LIBHDFS_BUILD_DIR/$HDFS_TEST
  126. BUILD_STATUS=$?
  127. echo "Tearing cluster down"
  128. kill -9 $MINI_CLUSTER_PID
  129. echo "Exiting with $BUILD_STATUS"
  130. exit $BUILD_STATUS