#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Run a shell command on all slave hosts.
#
# Environment Variables
#
#   HADOOP_SLAVES       File naming remote hosts.
#                       Default is ${HADOOP_CONF_DIR}/slaves.
#   HADOOP_CONF_DIR     Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#   HADOOP_SLAVE_SLEEP  Seconds to sleep between spawning remote commands.
#   HADOOP_SSH_OPTS     Options passed to ssh when running remote commands.
##
  26. usage="Usage: slaves.sh [--config confdir] command..."
  27. # if no args specified, show usage
  28. if [ $# -le 0 ]; then
  29. echo $usage
  30. exit 1
  31. fi
  32. bin=`dirname "${BASH_SOURCE-$0}"`
  33. bin=`cd "$bin"; pwd`
  34. . "$bin"/hadoop-config.sh
  35. if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  36. . "${HADOOP_CONF_DIR}/hadoop-env.sh"
  37. fi
  38. # Where to start the script, see hadoop-config.sh
  39. # (it set up the variables based on command line options)
  40. if [ "$HADOOP_SLAVE_NAMES" != '' ] ; then
  41. SLAVE_NAMES=$HADOOP_SLAVE_NAMES
  42. else
  43. SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  44. SLAVE_NAMES=$(cat "$SLAVE_FILE" | sed 's/#.*$//;/^$/d')
  45. fi
  46. # start the daemons
  47. for slave in $SLAVE_NAMES ; do
  48. ssh $HADOOP_SSH_OPTS $slave $"${@// /\\ }" \
  49. 2>&1 | sed "s/^/$slave: /" &
  50. if [ "$HADOOP_SLAVE_SLEEP" != "" ]; then
  51. sleep $HADOOP_SLAVE_SLEEP
  52. fi
  53. done
  54. wait