#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Note: This script depends on 7 environment variables to function correctly:
# a) CLASSPATH
# b) HADOOP_HOME
# c) HADOOP_CONF_DIR
# d) HADOOP_LOG_DIR
# e) LIBHDFS_BUILD_DIR
# f) LIBHDFS_INSTALL_DIR
# g) OS_NAME
# All of these are passed in by build.xml.
#
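
# Illustrative sanity check (an addition, not part of the original build.xml
# flow): warn early if any variable this script relies on is unset, which
# otherwise shows up later as confusing classpath or daemon-startup failures.
# JAVA_HOME is included because it is used below even though it is not in the
# list above.
for var in HADOOP_HOME HADOOP_CONF_DIR HADOOP_LOG_DIR \
           LIBHDFS_BUILD_DIR LIBHDFS_INSTALL_DIR OS_NAME JAVA_HOME; do
  eval val=\$$var
  if [ -z "$val" ]; then
    echo "WARNING: environment variable $var is not set"
  fi
done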
HDFS_TEST=hdfs_test
HADOOP_LIB_DIR=$HADOOP_HOME/lib
HADOOP_BIN_DIR=$HADOOP_HOME/bin

# Manipulate HADOOP_CONF_DIR too, which is necessary to circumvent bin/hadoop
HADOOP_CONF_DIR=$HADOOP_CONF_DIR:$HADOOP_HOME/conf

# set the pid file dir so pid files are not written to /tmp
export HADOOP_PID_DIR=$HADOOP_LOG_DIR

# CLASSPATH initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar

# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_HOME/build/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
fi
if [ -d "$HADOOP_HOME/build/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
fi
if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
fi

# empty IFS so that filenames with spaces are handled correctly in the loops below
IFS=

# add libs to CLASSPATH
for f in $HADOOP_HOME/lib/*.jar; do
  CLASSPATH=${CLASSPATH}:$f
done
for ff in $HADOOP_HOME/*.jar; do
  CLASSPATH=${CLASSPATH}:$ff
done
for f in $HADOOP_HOME/lib/jsp-2.0/*.jar; do
  CLASSPATH=${CLASSPATH}:$f
done
if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
  for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
    CLASSPATH=${CLASSPATH}:$f
  done
fi

# restore ordinary behaviour
unset IFS
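
# findlibjvm: locate the directory containing the JVM shared library under
# $JAVA_HOME (jvm.dll on Windows-like environments, libjvm.* or libkaffevm.*
# elsewhere), pruning "client" VM directories.  The result is used below to
# build the LD_PRELOAD path for the test binary.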
findlibjvm () {
  javabasedir=$JAVA_HOME
  case $OS_NAME in
    cygwin* | mingw* | pw23* )
      lib_jvm_dir=`find $javabasedir -follow \( \
        \( -name client -type d -prune \) -o \
        \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
      ;;
    aix*)
      lib_jvm_dir=`find $javabasedir \( \
        \( -name client -type d -prune \) -o \
        \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
      if test -z "$lib_jvm_dir"; then
        lib_jvm_dir=`find $javabasedir \( \
          \( -name client -type d -prune \) -o \
          \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
      fi
      ;;
    *)
      lib_jvm_dir=`find $javabasedir -follow \( \
        \( -name client -type d -prune \) -o \
        \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
      if test -z "$lib_jvm_dir"; then
        lib_jvm_dir=`find $javabasedir -follow \( \
          \( -name client -type d -prune \) -o \
          \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
      fi
      ;;
  esac
  echo $lib_jvm_dir
}
LIB_JVM_DIR=`findlibjvm`
echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
echo LIB_JVM_DIR = $LIB_JVM_DIR
echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
# Put in delays to ensure HDFS is up and running before the tests start,
# and that it has shut down after the tests are complete.
cd $HADOOP_HOME
echo Y | $HADOOP_BIN_DIR/hadoop namenode -format &&
$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start namenode && sleep 2 &&
$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start datanode && sleep 2 &&
sleep 20
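
# Illustrative alternative to the fixed sleeps above (an addition, not part of
# the original flow): poll the NameNode until it responds, assuming
# "hadoop dfsadmin -report" is available in this Hadoop version.
#   i=0
#   until $HADOOP_BIN_DIR/hadoop dfsadmin -report > /dev/null 2>&1; do
#     i=`expr $i + 1`
#     [ $i -ge 30 ] && break
#     sleep 2
#   done

# Run the libhdfs test binary.  LD_PRELOAD pulls in libjvm.so and libhdfs.so
# so the C client can start an embedded JVM and talk to the single-node HDFS
# instance started above.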
echo CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so" $LIBHDFS_BUILD_DIR/$HDFS_TEST &&
CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so" $LIBHDFS_BUILD_DIR/$HDFS_TEST
BUILD_STATUS=$?
sleep 3
$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs stop datanode && sleep 2 &&
$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs stop namenode && sleep 2
echo exiting with $BUILD_STATUS
exit $BUILD_STATUS