#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Included in all the hdfs scripts with the source command;
# should not be executed directly.
  18. function hadoop_subproject_init
  19. {
  20. if [ -e "${HADOOP_CONF_DIR}/hdfs-env.sh" ]; then
  21. . "${HADOOP_CONF_DIR}/hdfs-env.sh"
  22. fi
  23. # at some point in time, someone thought it would be a good idea to
  24. # create separate vars for every subproject. *sigh*
  25. # let's perform some overrides and setup some defaults for bw compat
  26. # this way the common hadoop var's == subproject vars and can be
  27. # used interchangeable from here on out
  28. # ...
  29. # this should get deprecated at some point.
  30. HADOOP_LOG_DIR="${HADOOP_HDFS_LOG_DIR:-$HADOOP_LOG_DIR}"
  31. HADOOP_HDFS_LOG_DIR="${HADOOP_LOG_DIR}"
  32. HADOOP_LOGFILE="${HADOOP_HDFS_LOGFILE:-$HADOOP_LOGFILE}"
  33. HADOOP_HDFS_LOGFILE="${HADOOP_LOGFILE}"
  34. HADOOP_NICENESS=${HADOOP_HDFS_NICENESS:-$HADOOP_NICENESS}
  35. HADOOP_HDFS_NICENESS="${HADOOP_NICENESS}"
  36. HADOOP_STOP_TIMEOUT=${HADOOP_HDFS_STOP_TIMEOUT:-$HADOOP_STOP_TIMEOUT}
  37. HADOOP_HDFS_STOP_TIMEOUT="${HADOOP_STOP_TIMEOUT}"
  38. HADOOP_PID_DIR="${HADOOP_HDFS_PID_DIR:-$HADOOP_PID_DIR}"
  39. HADOOP_HDFS_PID_DIR="${HADOOP_PID_DIR}"
  40. HADOOP_ROOT_LOGGER=${HADOOP_HDFS_ROOT_LOGGER:-$HADOOP_ROOT_LOGGER}
  41. HADOOP_HDFS_ROOT_LOGGER="${HADOOP_ROOT_LOGGER}"
  42. HADOOP_HDFS_HOME="${HADOOP_HDFS_HOME:-$HADOOP_HOME_DIR}"
  43. HADOOP_IDENT_STRING="${HADOOP_HDFS_IDENT_STRING:-$HADOOP_IDENT_STRING}"
  44. HADOOP_HDFS_IDENT_STRING="${HADOOP_IDENT_STRING}"
  45. # turn on the defaults
  46. export HADOOP_NAMENODE_OPTS=${HADOOP_NAMENODE_OPTS:-"-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender"}
  47. export HADOOP_SECONDARYNAMENODE_OPTS=${HADOOP_SECONDARYNAMENODE_OPTS:-"-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender"}
  48. export HADOOP_DATANODE_OPTS=${HADOOP_DATANODE_OPTS:-"-Dhadoop.security.logger=ERROR,RFAS"}
  49. export HADOOP_DN_SECURE_EXTRA_OPTS=${HADOOP_DN_SECURE_EXTRA_OPTS:-"-jvm server"}
  50. export HADOOP_NFS3_SECURE_EXTRA_OPTS=${HADOOP_NFS3_SECURE_EXTRA_OPTS:-"-jvm server"}
  51. export HADOOP_PORTMAP_OPTS=${HADOOP_PORTMAP_OPTS:-"-Xmx512m"}
  52. }
  53. if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
  54. _hd_this="${BASH_SOURCE-$0}"
  55. HADOOP_LIBEXEC_DIR=$(cd -P -- "$(dirname -- "${_hd_this}")" >/dev/null && pwd -P)
  56. fi
  57. if [ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]; then
  58. . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
  59. elif [ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]; then
  60. . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
  61. elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
  62. . "${HADOOP_HOME}/libexec/hadoop-config.sh"
  63. else
  64. echo "ERROR: Hadoop common not found." 2>&1
  65. exit 1
  66. fi