#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Runs a Hadoop command as a daemon.
#
# Environment Variables
#
#   HADOOP_CONF_DIR      Alternate conf dir. Default is ${HADOOP_PREFIX}/conf.
#   HADOOP_LOG_DIR       Where log files are stored. PWD by default.
#   HADOOP_MASTER        host:path where hadoop code should be rsync'd from.
#   HADOOP_PID_DIR       Where pid files are stored. /tmp by default.
#   HADOOP_IDENT_STRING  A string representing this instance of hadoop. $USER by default.
#   HADOOP_NICENESS      The scheduling priority for daemons. Defaults to 0.
##

usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <hadoop-command> <args...>"
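# Examples (the conf dir path here is illustrative):
#   hadoop-daemon.sh --config /etc/hadoop start namenode
#   hadoop-daemon.sh stop datanode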

# if no args specified, show usage
if [ $# -le 1 ]; then
  echo "$usage"
  exit 1
fi
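
# Resolve the directory this script lives in, then source the shared
# hadoop-config.sh helper (preferring the libexec copy when it exists).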
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`

if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
  . "$bin"/../libexec/hadoop-config.sh
else
  . "$bin/hadoop-config.sh"
fi

# get arguments
startStop=$1
shift
command=$1
shift
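
# Everything left in "$@" is forwarded verbatim to the hadoop command below.

# Rotate $1: .4 -> .5, ..., .1 -> .2, then the live file -> .1, keeping at
# most $2 (default 5) old copies.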
hadoop_rotate_log ()
{
  log=$1;
  num=5;
  if [ -n "$2" ]; then
    num=$2
  fi
  if [ -f "$log" ]; then # rotate logs
    while [ $num -gt 1 ]; do
      prev=`expr $num - 1`
      [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
      num=$prev
    done
    mv "$log" "$log.$num";
  fi
}

if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi

# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
if [ "$command" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
  export HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
  export HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
  export HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
  starting_secure_dn="true"
fi
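
# A secure datanode deployment would typically set, in hadoop-env.sh,
# something like the following (user name and paths are illustrative,
# not defaults):
#   export HADOOP_SECURE_DN_USER=hdfs
#   export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop-hdfs
#   export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop-hdfs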
  71. if [ "$HADOOP_IDENT_STRING" = "" ]; then
  72. export HADOOP_IDENT_STRING="$USER"
  73. fi

# get log directory
if [ "$HADOOP_LOG_DIR" = "" ]; then
  export HADOOP_LOG_DIR="$HADOOP_HOME/logs"
fi
mkdir -p "$HADOOP_LOG_DIR"

# Probe that the log directory is writable; if the touch fails, try to
# chown the directory to the daemon user.
touch "$HADOOP_LOG_DIR/.hadoop_test" > /dev/null 2>&1
TEST_LOG_DIR=$?
if [ "${TEST_LOG_DIR}" = "0" ]; then
  rm -f "$HADOOP_LOG_DIR/.hadoop_test"
else
  chown "$HADOOP_IDENT_STRING" "$HADOOP_LOG_DIR"
fi
  86. if [ "$HADOOP_PID_DIR" = "" ]; then
  87. HADOOP_PID_DIR=/tmp
  88. fi

# some variables
export HADOOP_LOGFILE=hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.log
export HADOOP_ROOT_LOGGER="INFO,DRFA"
log=$HADOOP_LOG_DIR/hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.out
pid=$HADOOP_PID_DIR/hadoop-$HADOOP_IDENT_STRING-$command.pid
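# ${HADOOP_STOP_TIMEOUT:-5} keeps a value exported by the caller and falls
# back to 5 seconds otherwise; it bounds the graceful-shutdown wait below.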
HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}

# Set default scheduling priority
if [ "$HADOOP_NICENESS" = "" ]; then
  export HADOOP_NICENESS=0
fi

case $startStop in

  (start)

    mkdir -p "$HADOOP_PID_DIR"
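
    # Refuse to start when a live process already owns the pid file.
    # kill -0 delivers no signal; it only checks that the pid exists and
    # can be signaled.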
    if [ -f "$pid" ]; then
      if kill -0 `cat "$pid"` > /dev/null 2>&1; then
        echo $command running as process `cat "$pid"`. Stop it first.
        exit 1
      fi
    fi
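
    # When HADOOP_MASTER (host:path) is set, refresh the local Hadoop tree
    # from that host over ssh before starting the daemon.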
  108. if [ "$HADOOP_MASTER" != "" ]; then
  109. echo rsync from $HADOOP_MASTER
  110. rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HADOOP_MASTER/ "$HADOOP_HOME"
  111. fi

    hadoop_rotate_log $log
    echo starting $command, logging to $log
    cd "$HADOOP_PREFIX"
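
    # Launch detached: nohup keeps the daemon alive after this shell exits,
    # nice applies HADOOP_NICENESS, stdout/stderr are redirected to $log,
    # stdin comes from /dev/null, and $! (the background pid) is recorded
    # in the pid file.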
    nohup nice -n $HADOOP_NICENESS "$HADOOP_PREFIX"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
    echo $! > "$pid"
    sleep 1
    head "$log"

    # capture the ulimit output
    if [ "true" = "$starting_secure_dn" ]; then
      echo "ulimit -a for secure datanode user $HADOOP_SECURE_DN_USER" >> "$log"
      # capture the ulimit info for the appropriate user
      su --shell=/bin/bash $HADOOP_SECURE_DN_USER -c 'ulimit -a' >> "$log" 2>&1
    else
      echo "ulimit -a for user $USER" >> "$log"
      ulimit -a >> "$log" 2>&1
    fi
    ;;

  (stop)

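    # Stop sequence: confirm the recorded pid is still alive (kill -0),
    # send SIGTERM, wait up to HADOOP_STOP_TIMEOUT seconds, then escalate
    # to SIGKILL (kill -9), which cannot be caught or ignored.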
    if [ -f "$pid" ]; then
      TARGET_PID=`cat "$pid"`
      if kill -0 $TARGET_PID > /dev/null 2>&1; then
        echo stopping $command
        kill $TARGET_PID
        sleep $HADOOP_STOP_TIMEOUT
        if kill -0 $TARGET_PID > /dev/null 2>&1; then
          echo "$command did not stop gracefully after $HADOOP_STOP_TIMEOUT seconds: killing with kill -9"
          kill -9 $TARGET_PID
        fi
      else
        echo no $command to stop
      fi
    else
      echo no $command to stop
    fi
    ;;

  (*)
    echo "$usage"
    exit 1
    ;;

esac