slaves.sh

#!/bin/bash
#
# Run a shell command on all slave hosts.
#
# Environment Variables
#
#   HADOOP_SLAVES       File naming remote hosts.
#                       Default is ${HADOOP_CONF_DIR}/slaves.
#   HADOOP_CONF_DIR     Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#   HADOOP_SLAVE_SLEEP  Seconds to sleep between spawning remote commands.
#   HADOOP_SSH_OPTS     Options passed to ssh when running remote commands.
##
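
# Example invocation (hypothetical ssh options and command, assuming the
# usual bin/ layout): run `uptime` on every listed slave, with each output
# line prefixed by the originating hostname:
#
#   HADOOP_SSH_OPTS="-o ConnectTimeout=5" bin/slaves.sh uptime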
usage="Usage: slaves.sh command..."

# if no args specified, show usage
if [ $# -le 0 ]; then
  echo "$usage"
  exit 1
fi
# resolve links - $0 may be a softlink
this="$0"
while [ -h "$this" ]; do
  ls=`ls -ld "$this"`
  # extract the symlink target from the "ls -ld" output
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '.*/.*' > /dev/null; then
    # target contains a slash: follow it directly
    this="$link"
  else
    # bare filename: resolve relative to the script's own directory
    this=`dirname "$this"`/"$link"
  fi
done
# the root of the Hadoop installation
HADOOP_HOME=`dirname "$this"`/..

# Allow alternate conf dir location.
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}"
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi

if [ "$HADOOP_SLAVES" = "" ]; then
  export HADOOP_SLAVES="${HADOOP_CONF_DIR}/slaves"
fi
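
# The slaves file simply lists one host per line (hypothetical names shown):
#
#   node01.example.com
#   node02.example.com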
for slave in `cat "$HADOOP_SLAVES"`; do
  # Backslash-escape spaces in each argument so the command survives the
  # remote shell's word splitting, run it in the background over ssh, and
  # prefix every line of output with the slave's hostname.
  ssh $HADOOP_SSH_OPTS $slave $"${@// /\\ }" \
    2>&1 | sed "s/^/$slave: /" &
  # optionally throttle how quickly remote commands are spawned
  if [ "$HADOOP_SLAVE_SLEEP" != "" ]; then
    sleep $HADOOP_SLAVE_SLEEP
  fi
done

# wait for all backgrounded ssh commands to finish
wait