# hadoop-daemon.sh (2.7 KB)
  1. #!/bin/sh
  2. #
  3. # Runs a Hadoop command as a daemon.
  4. #
  5. # Environment Variables
  6. #
  7. # HADOOP_CONF_DIR Alternate conf dir. Default is ${HADOOP_HOME}/conf.
  8. # HADOOP_LOG_DIR Where log files are stored. PWD by default.
  9. # HADOOP_MASTER host:path where hadoop code should be rsync'd from
  10. # HADOOP_PID_DIR The pid files are stored. /tmp by default.
  11. # HADOOP_IDENT_STRING A string representing this instance of hadoop. $USER by default
  12. # HADOOP_NICENESS The scheduling priority for daemons. Defaults to 0.
  13. ##
  14. usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <hadoop-command> <args...>"
  15. # if no args specified, show usage
  16. if [ $# -le 1 ]; then
  17. echo $usage
  18. exit 1
  19. fi
  20. bin=`dirname "$0"`
  21. bin=`cd "$bin"; pwd`
  22. . "$bin"/hadoop-config.sh
  23. # get arguments
  24. startStop=$1
  25. shift
  26. command=$1
  27. shift
  28. hadoop_rotate_log ()
  29. {
  30. log=$1;
  31. num=5;
  32. if [ -n "$2" ]; then
  33. num=$2
  34. fi
  35. if [ -f "$log" ]; then # rotate logs
  36. while [ $num -gt 1 ]; do
  37. prev=`expr $num - 1`
  38. [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
  39. num=$prev
  40. done
  41. mv "$log" "$log.$num";
  42. fi
  43. }
  44. if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  45. . "${HADOOP_CONF_DIR}/hadoop-env.sh"
  46. fi
  47. # get log directory
  48. if [ "$HADOOP_LOG_DIR" = "" ]; then
  49. export HADOOP_LOG_DIR="$HADOOP_HOME/logs"
  50. fi
  51. mkdir -p "$HADOOP_LOG_DIR"
  52. if [ "$HADOOP_PID_DIR" = "" ]; then
  53. HADOOP_PID_DIR=/tmp
  54. fi
  55. if [ "$HADOOP_IDENT_STRING" = "" ]; then
  56. export HADOOP_IDENT_STRING="$USER"
  57. fi
  58. # some variables
  59. export HADOOP_LOGFILE=hadoop-$HADOOP_IDENT_STRING-$command-`hostname`.log
  60. export HADOOP_ROOT_LOGGER="INFO,DRFA"
  61. log=$HADOOP_LOG_DIR/hadoop-$HADOOP_IDENT_STRING-$command-`hostname`.out
  62. pid=$HADOOP_PID_DIR/hadoop-$HADOOP_IDENT_STRING-$command.pid
  63. # Set default scheduling priority
  64. if [ "$HADOOP_NICENESS" = "" ]; then
  65. export HADOOP_NICENESS=0
  66. fi
  67. case $startStop in
  68. (start)
  69. if [ -f $pid ]; then
  70. if kill -0 `cat $pid` > /dev/null 2>&1; then
  71. echo $command running as process `cat $pid`. Stop it first.
  72. exit 1
  73. fi
  74. fi
  75. if [ "$HADOOP_MASTER" != "" ]; then
  76. echo rsync from $HADOOP_MASTER
  77. rsync -a -e ssh --delete --exclude=.svn $HADOOP_MASTER/ "$HADOOP_HOME"
  78. fi
  79. hadoop_rotate_log $log
  80. echo starting $command, logging to $log
  81. nohup nice -n $HADOOP_NICENESS "$HADOOP_HOME"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
  82. echo $! > $pid
  83. sleep 1; head "$log"
  84. ;;
  85. (stop)
  86. if [ -f $pid ]; then
  87. if kill -0 `cat $pid` > /dev/null 2>&1; then
  88. echo stopping $command
  89. kill `cat $pid`
  90. else
  91. echo no $command to stop
  92. fi
  93. else
  94. echo no $command to stop
  95. fi
  96. ;;
  97. (*)
  98. echo $usage
  99. exit 1
  100. ;;
  101. esac