#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Note: This script depends on 8 environment variables to function correctly:
# a) CLASSPATH
# b) HADOOP_HOME
# c) HADOOP_CONF_DIR
# d) HADOOP_LOG_DIR
# e) LIBHDFS_BUILD_DIR
# f) LIBHDFS_INSTALL_DIR
# g) OS_NAME
# h) CLOVER_JAR
# All these are passed by build.xml.
#
  30. HDFS_TEST=hdfs_test
  31. HADOOP_LIB_DIR=$HADOOP_HOME/lib
  32. HADOOP_BIN_DIR=$HADOOP_HOME/bin
  33. COMMON_BUILD_DIR=$HADOOP_HOME/build/ivy/lib/Hadoop-Hdfs/common
  34. COMMON_JAR=$COMMON_BUILD_DIR/hadoop-common-0.22.0-SNAPSHOT.jar
  35. cat > $HADOOP_CONF_DIR/core-site.xml <<EOF
  36. <?xml version="1.0"?>
  37. <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
  38. <configuration>
  39. <property>
  40. <name>hadoop.tmp.dir</name>
  41. <value>file:///$LIBHDFS_TEST_DIR</value>
  42. </property>
  43. <property>
  44. <name>fs.default.name</name>
  45. <value>hdfs://localhost:23000/</value>
  46. </property>
  47. </configuration>
  48. EOF
  49. cat > $HADOOP_CONF_DIR/hdfs-site.xml <<EOF
  50. <?xml version="1.0"?>
  51. <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
  52. <configuration>
  53. <property>
  54. <name>dfs.replication</name>
  55. <value>1</value>
  56. </property>
  57. <property>
  58. <name>dfs.support.append</name>
  59. <value>true</value>
  60. </property>
  61. <property>
  62. <name>dfs.namenode.logging.level</name>
  63. <value>DEBUG</value>
  64. </property>
  65. </configuration>
  66. EOF
  67. cat > $HADOOP_CONF_DIR/slaves <<EOF
  68. localhost
  69. EOF
  70. # If we are running from the hdfs repo we need to make sure
  71. # HADOOP_BIN_DIR contains the common scripts.
  72. # If the bin directory does not and we've got a common jar extract its
  73. # bin directory to HADOOP_HOME/bin. The bin scripts hdfs-config.sh and
  74. # hadoop-config.sh assume the bin directory is named "bin" and that it
  75. # is located in HADOOP_HOME.
  76. unpacked_common_bin_dir=0
  77. if [ ! -f $HADOOP_BIN_DIR/hadoop-config.sh ]; then
  78. if [ -f $COMMON_JAR ]; then
  79. jar xf $COMMON_JAR bin.tgz
  80. tar xfz bin.tgz -C $HADOOP_BIN_DIR
  81. unpacked_common_bin_dir=1
  82. fi
  83. fi
  84. # Manipulate HADOOP_CONF_DIR too
  85. # which is necessary to circumvent bin/hadoop
  86. HADOOP_CONF_DIR=$HADOOP_CONF_DIR:$HADOOP_HOME/conf
  87. # set pid file dir so they are not written to /tmp
  88. export HADOOP_PID_DIR=$HADOOP_LOG_DIR
  89. # CLASSPATH initially contains $HADOOP_CONF_DIR
  90. CLASSPATH="${HADOOP_CONF_DIR}"
  91. CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
  92. # for developers, add Hadoop classes to CLASSPATH
  93. if [ -d "$HADOOP_HOME/build/classes" ]; then
  94. CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
  95. fi
  96. if [ -d "$HADOOP_HOME/build/webapps" ]; then
  97. CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
  98. fi
  99. if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  100. CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
  101. fi
  102. # add Clover jar file needed for code coverage runs
  103. CLASSPATH=${CLASSPATH}:${CLOVER_JAR};
  104. # so that filenames w/ spaces are handled correctly in loops below
  105. IFS=
  106. # add libs to CLASSPATH
  107. for f in $HADOOP_HOME/lib/*.jar; do
  108. CLASSPATH=${CLASSPATH}:$f;
  109. done
  110. for f in $HADOOP_HOME/*.jar; do
  111. CLASSPATH=${CLASSPATH}:$f
  112. done
  113. for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
  114. CLASSPATH=${CLASSPATH}:$f;
  115. done
  116. if [ -d "$COMMON_BUILD_DIR" ]; then
  117. CLASSPATH=$CLASSPATH:$COMMON_JAR
  118. for f in $COMMON_BUILD_DIR/*.jar; do
  119. CLASSPATH=${CLASSPATH}:$f;
  120. done
  121. fi
  122. # restore ordinary behaviour
  123. unset IFS
  124. findlibjvm () {
  125. javabasedir=$JAVA_HOME
  126. case $OS_NAME in
  127. cygwin* | mingw* | pw23* )
  128. lib_jvm_dir=`find $javabasedir -follow \( \
  129. \( -name client -type d -prune \) -o \
  130. \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  131. ;;
  132. aix*)
  133. lib_jvm_dir=`find $javabasedir \( \
  134. \( -name client -type d -prune \) -o \
  135. \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  136. if test -z "$lib_jvm_dir"; then
  137. lib_jvm_dir=`find $javabasedir \( \
  138. \( -name client -type d -prune \) -o \
  139. \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  140. fi
  141. ;;
  142. *)
  143. lib_jvm_dir=`find $javabasedir -follow \( \
  144. \( -name client -type d -prune \) -o \
  145. \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  146. if test -z "$lib_jvm_dir"; then
  147. lib_jvm_dir=`find $javabasedir -follow \( \
  148. \( -name client -type d -prune \) -o \
  149. \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
  150. fi
  151. ;;
  152. esac
  153. echo $lib_jvm_dir
  154. }
  155. LIB_JVM_DIR=`findlibjvm`
  156. echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
  157. echo LIB_JVM_DIR = $LIB_JVM_DIR
  158. echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
  159. # Put delays to ensure hdfs is up and running and also shuts down
  160. # after the tests are complete
  161. cd $HADOOP_HOME
  162. echo Y | $HADOOP_BIN_DIR/hdfs namenode -format &&
  163. $HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start namenode && sleep 2
  164. $HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start datanode && sleep 2
  165. echo "Wait 30s for the datanode to start up..."
  166. sleep 30
  167. CLASSPATH=$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so:" $LIBHDFS_BUILD_DIR/$HDFS_TEST
  168. BUILD_STATUS=$?
  169. sleep 3
  170. $HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs stop datanode && sleep 2
  171. $HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs stop namenode && sleep 2
  172. if [ $unpacked_common_bin_dir -eq 1 ]; then
  173. rm -rf bin.tgz
  174. fi
  175. echo exiting with $BUILD_STATUS
  176. exit $BUILD_STATUS