slaves.sh

#!/bin/bash
#
# Run a shell command on all slave hosts.
#
# Environment Variables
#
#   HADOOP_SLAVES      File naming remote hosts.
#                      Default is ${HADOOP_CONF_DIR}/slaves.
#   HADOOP_CONF_DIR    Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#   HADOOP_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
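#   HADOOP_SSH_OPTS    Options passed to ssh when running remote commands
#                      (see the default set further down in this script).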
##

usage="Usage: slaves.sh command..."

# if no args specified, show usage
if [ $# -le 0 ]; then
  echo $usage
  exit 1
fi
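# Example invocation (assuming the script lives in the Hadoop bin directory):
#   bin/slaves.sh uptime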
# resolve links - $0 may be a softlink
this="$0"
while [ -h "$this" ]; do
  ls=`ls -ld "$this"`
  # extract the symlink target from the "ls -ld" output
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '.*/.*' > /dev/null; then
    # target contains a slash; use it as a path directly
    this="$link"
  else
    # bare target; resolve it relative to the link's own directory
    this=`dirname "$this"`/"$link"
  fi
done

# the root of the Hadoop installation
HADOOP_HOME=`dirname "$this"`/..

# Allow alternate conf dir location.
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}"

if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi

if [ "$HADOOP_SLAVES" = "" ]; then
  export HADOOP_SLAVES="${HADOOP_CONF_DIR}/slaves"
fi
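# The slaves file is read with a plain `cat` below, so its format is one
# hostname per line, e.g. (hypothetical hosts):
#   slave1.example.com
#   slave2.example.com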
# By default, forward the HADOOP_CONF_DIR environment variable to the
# remote slave. The remote slave must have the following added to its
# /etc/ssh/sshd_config:
#   AcceptEnv HADOOP_CONF_DIR
# See "man ssh_config" for more on SendEnv and AcceptEnv.
if [ "$HADOOP_SSH_OPTS" = "" ]; then
  export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
fi
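# Example override (not part of the script's defaults): relax the connect
# timeout and skip host-key prompts on a trusted cluster:
#   export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o StrictHostKeyChecking=no -o SendEnv=HADOOP_CONF_DIR"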
for slave in `cat "$HADOOP_SLAVES"`; do
  # Run the command on each slave in the background, prefixing every
  # output line with the slave's name. The ${@// /\\ } expansion escapes
  # spaces inside arguments so multi-word arguments survive the remote
  # shell's word splitting.
  ssh $HADOOP_SSH_OPTS $slave $"${@// /\\ }" \
    2>&1 | sed "s/^/$slave: /" &
  if [ "$HADOOP_SLAVE_SLEEP" != "" ]; then
    sleep $HADOOP_SLAVE_SLEEP
  fi
done

# block until all of the background ssh commands have finished
wait
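# Example: stagger the remote commands by 2 seconds per host
# ("jps" here is just an illustrative command):
#   HADOOP_SLAVE_SLEEP=2 bin/slaves.sh jps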