hadoop-daemon.sh

#!/bin/bash
#
# Runs a Hadoop command as a daemon.
#
# Environment Variables
#
#   HADOOP_CONF_DIR      Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#   HADOOP_LOG_DIR       Where log files are stored. ${HADOOP_HOME}/logs by default.
#   HADOOP_MASTER        host:path where hadoop code should be rsync'd from.
#   HADOOP_PID_DIR       Where the pid files are stored. /tmp by default.
#   HADOOP_IDENT_STRING  A string representing this instance of hadoop. $USER by default.
##
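
# Example usage, run from the Hadoop install directory (the daemon name
# "namenode" is only an illustration; any command understood by bin/hadoop works):
#
#   bin/hadoop-daemon.sh start namenode
#   bin/hadoop-daemon.sh stop namenode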

usage="Usage: hadoop-daemon.sh [start|stop] [hadoop-command] [args...]"

# if no args specified, show usage
if [ $# -le 1 ]; then
  echo "$usage"
  exit 1
fi

# get arguments
startStop=$1
shift
command=$1
shift

# resolve links - $0 may be a softlink
this="$0"
while [ -h "$this" ]; do
  # extract the link target from the "-> target" portion of `ls -ld` output
  ls=`ls -ld "$this"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '.*/.*' > /dev/null; then
    # target contains a slash: use it directly
    this="$link"
  else
    # bare filename: the target lives next to the link itself
    this=`dirname "$this"`/"$link"
  fi
done

# the root of the Hadoop installation
export HADOOP_HOME=`dirname "$this"`/..

# Allow alternate conf dir location.
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
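
# hadoop-env.sh, if present, is the conventional place to override the
# environment variables documented at the top of this script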
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi

# get log directory
if [ "$HADOOP_LOG_DIR" = "" ]; then
  export HADOOP_LOG_DIR="$HADOOP_HOME/logs"
fi
mkdir -p "$HADOOP_LOG_DIR"
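
# default the pid directory to /tmp when HADOOP_PID_DIR is unset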
if [ "$HADOOP_PID_DIR" = "" ]; then
  HADOOP_PID_DIR=/tmp
fi
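
# identify this instance of hadoop by the invoking user unless overridden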
if [ "$HADOOP_IDENT_STRING" = "" ]; then
  export HADOOP_IDENT_STRING="$USER"
fi

# some variables
export HADOOP_LOGFILE=hadoop-$HADOOP_IDENT_STRING-$command-`hostname`.log
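# DRFA refers to the daily rolling file appender defined in Hadoop's log4j configuration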
export HADOOP_ROOT_LOGGER="INFO,DRFA"
log=$HADOOP_LOG_DIR/hadoop-$HADOOP_IDENT_STRING-$command-`hostname`.out
pid=$HADOOP_PID_DIR/hadoop-$HADOOP_IDENT_STRING-$command.pid
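
# For example, with HADOOP_IDENT_STRING=alice (a hypothetical user), command=namenode
# and hostname node1, daemon output lands in
#   $HADOOP_LOG_DIR/hadoop-alice-namenode-node1.out
# and the pid is recorded in
#   $HADOOP_PID_DIR/hadoop-alice-namenode.pid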

case $startStop in

  (start)

    # refuse to start if a pid file exists and that process is still running
    if [ -f "$pid" ]; then
      if kill -0 `cat $pid` > /dev/null 2>&1; then
        echo $command running as process `cat $pid`. Stop it first.
        exit 1
      fi
    fi
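
    # optionally refresh this node's Hadoop install from $HADOOP_MASTER before starting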
    if [ "$HADOOP_MASTER" != "" ]; then
      echo rsync from $HADOOP_MASTER
      rsync -a -e ssh --delete --exclude=.svn $HADOOP_MASTER/ "$HADOOP_HOME"
    fi
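
    # launch the command via bin/hadoop, detached from the terminal, and record its pid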
    echo starting $command, logging to $log
    nohup "$HADOOP_HOME"/bin/hadoop $command "$@" >& "$log" < /dev/null &
    echo $! > "$pid"
    sleep 1; head "$log"
    ;;

  (stop)

    # signal the daemon recorded in the pid file, if it is still running
    if [ -f "$pid" ]; then
      if kill -0 `cat $pid` > /dev/null 2>&1; then
        echo stopping $command
        kill `cat $pid`
      else
        echo no $command to stop
      fi
    else
      echo no $command to stop
    fi
    ;;

  (*)
    echo "$usage"
    exit 1
    ;;

esac