#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------
#
# The purpose of this script is to distribute the exclude file (see
# "dfs.hosts.exclude" in hdfs-site.xml).
#
# Input of the script is a local exclude file. The exclude file
# will be distributed to all the namenodes. The location on the namenodes
# is determined by the configuration "dfs.hosts.exclude" in hdfs-site.xml
# (this value is read from the local copy of hdfs-site.xml and must be same
# on all the namenodes).
#
# The user running this script needs write permissions on the target
# directory on namenodes.
#
# After this command, run refresh-namenodes.sh so that namenodes start
# using the new exclude file.

# Resolve the directory this script lives in so libexec can be found
# relative to it regardless of the caller's working directory.
bin=$(dirname "$0")
bin=$(cd "$bin"; pwd)

DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
# Pull in HADOOP_PREFIX and the rest of the HDFS environment.
. "$HADOOP_LIBEXEC_DIR/hdfs-config.sh"

# $1: path to the local exclude file to distribute (required).
if [ "$1" = '' ] ; then
  # BUGFIX: the original line was missing 'echo', so the message string
  # itself was executed as a command instead of being printed.
  echo "Error: please specify local exclude file as a first argument" >&2
  exit 1
else
  excludeFilenameLocal=$1
fi

if [ ! -f "$excludeFilenameLocal" ] ; then
  echo "Error: exclude file [$excludeFilenameLocal] does not exist." >&2
  exit 1
fi

# Whitespace-separated list of namenode hosts, and the remote path that
# "dfs.hosts.exclude" points to (read from the local hdfs-site.xml; it is
# assumed to be the same path on every namenode).
namenodes=$("$HADOOP_PREFIX/bin/hdfs" getconf -namenodes)
excludeFilenameRemote=$("$HADOOP_PREFIX/bin/hdfs" getconf -excludeFile)

if [ "$excludeFilenameRemote" = '' ] ; then
  echo \
    "Error: hdfs getconf -excludeFile returned empty string, " \
    "please setup dfs.hosts.exclude in hdfs-site.xml in local cluster " \
    "configuration and on all namenodes" >&2
  exit 1
fi

echo "Copying exclude file [$excludeFilenameRemote] to namenodes:"

# Initialize explicitly so the post-loop check does not read an unset
# variable when every transfer succeeds.
errorFlag='0'
# $namenodes is intentionally unquoted: it is a whitespace-separated host
# list produced by 'hdfs getconf -namenodes' and must word-split here.
for namenode in $namenodes ; do
  echo " [$namenode]"
  scp "$excludeFilenameLocal" "$namenode:$excludeFilenameRemote"
  # Record the failure but keep going, so one unreachable namenode does
  # not prevent distribution to the rest.
  if [ "$?" != '0' ] ; then errorFlag='1' ; fi
done

if [ "$errorFlag" = '1' ] ; then
  echo "Error: transfer of exclude file failed, see error messages above." >&2
  exit 1
else
  echo "Transfer of exclude file to all namenodes succeeded."
fi

# eof