#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Start hadoop dfs daemons.
# Optionally upgrade or rollback dfs state.
# Run this on master node.
  19. usage="Usage: start-dfs.sh [-upgrade|-rollback]"
  20. bin=`dirname "${BASH_SOURCE-$0}"`
  21. bin=`cd "$bin"; pwd`
  22. . "$bin/hdfs-config.sh"
  23. # get arguments
  24. if [ $# -ge 1 ]; then
  25. nameStartOpt="$1"
  26. shift
  27. case "$nameStartOpt" in
  28. (-upgrade)
  29. ;;
  30. (-rollback)
  31. dataStartOpt="$nameStartOpt"
  32. ;;
  33. (*)
  34. echo $usage
  35. exit 1
  36. ;;
  37. esac
  38. fi
  39. #---------------------------------------------------------
  40. # namenodes
  41. NAMENODES=$($HADOOP_HDFS_HOME/bin/hdfs getconf -namenodes)
  42. echo "Starting namenodes on [$NAMENODES]"
  43. "$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
  44. --config "$HADOOP_CONF_DIR" \
  45. --hostnames "$NAMENODES" \
  46. --script "$bin/hdfs" start namenode $nameStartOpt
  47. #---------------------------------------------------------
  48. # datanodes (using defalut slaves file)
  49. if [ -n "$HADOOP_SECURE_DN_USER" ]; then
  50. echo \
  51. "Attempting to start secure cluster, skipping datanodes. " \
  52. "Run start-secure-dns.sh as root to complete startup."
  53. else
  54. "$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
  55. --config "$HADOOP_CONF_DIR" \
  56. --script "$bin/hdfs" start datanode $dataStartOpt
  57. fi
  58. #---------------------------------------------------------
  59. # secondary namenodes (if any)
  60. # if there are no secondary namenodes configured it returns
  61. # 0.0.0.0 or empty string
  62. SECONDARY_NAMENODES=$($HADOOP_HDFS_HOME/bin/hdfs getconf -secondarynamenodes 2>&-)
  63. SECONDARY_NAMENODES=${SECONDARY_NAMENODES:='0.0.0.0'}
  64. if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
  65. echo \
  66. "Secondary namenodes are not configured. " \
  67. "Cannot start secondary namenodes."
  68. else
  69. echo "Starting secondary namenodes [$SECONDARY_NAMENODES]"
  70. "$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
  71. --config "$HADOOP_CONF_DIR" \
  72. --hostnames "$SECONDARY_NAMENODES" \
  73. --script "$bin/hdfs" start secondarynamenode
  74. fi
# eof