hadoop-env.sh

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Set Hadoop-specific environment variables here.

# The only required environment variable is JAVA_HOME.  All others are
# optional.  When running a distributed configuration it is best to
# set JAVA_HOME in this file, so that it is correctly defined on
# remote nodes.

# The java implementation to use.
export JAVA_HOME=${JAVA_HOME}
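
# For example (the path below is an assumption; point it at your own JDK),
# JAVA_HOME can be pinned explicitly instead of inherited from the shell:
# export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64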

# The jsvc implementation to use. Jsvc is required to run secure datanodes.
#export JSVC_HOME=${JSVC_HOME}
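
# For example (the location is an assumption; JSVC_HOME should be the
# directory that contains the jsvc binary on your system):
# export JSVC_HOME=/usr/bin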

export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}

# Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
for f in $HADOOP_HOME/contrib/capacity-scheduler/*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done
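
# Additional jars can be appended the same way; for example (hypothetical
# path, shown only to illustrate the pattern):
# export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/opt/myjars/my-extra-lib.jar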

# The maximum amount of heap to use, in MB. Default is 1000.
#export HADOOP_HEAPSIZE=
#export HADOOP_NAMENODE_INIT_HEAPSIZE=""
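
# For example (the sizes are illustrative; tune them to your cluster), a
# 2 GB daemon heap with a 4 GB initial NameNode heap would be:
# export HADOOP_HEAPSIZE=2048
# export HADOOP_NAMENODE_INIT_HEAPSIZE="4096"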

# Extra Java runtime options. Empty by default.
export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"

# Command specific options appended to HADOOP_OPTS when specified
export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,NullAppender $HADOOP_NAMENODE_OPTS"
export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,NullAppender $HADOOP_SECONDARYNAMENODE_OPTS"
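
# Further per-daemon JVM flags follow the same pattern of prepending new
# flags before the existing value; for example (flag value is illustrative):
# export HADOOP_NAMENODE_OPTS="-Xmx4g $HADOOP_NAMENODE_OPTS"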

# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"

# On secure datanodes, user to run the datanode as after dropping privileges
export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
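
# For example (the account name is an assumption; many installs use a
# dedicated unprivileged "hdfs" user for this purpose):
# export HADOOP_SECURE_DN_USER=hdfs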

# Where log files are stored. $HADOOP_HOME/logs by default.
export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER

# Where log files are stored in the secure data environment.
export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}

# The directory where pid files are stored. /tmp by default.
export HADOOP_PID_DIR=${HADOOP_PID_DIR}
export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}
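
# For example (the path is an assumption), pid files can be kept out of
# /tmp, where they may be cleaned up while daemons are still running:
# export HADOOP_PID_DIR=/var/run/hadoop
# export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop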

# A string representing this instance of hadoop. $USER by default.
export HADOOP_IDENT_STRING=$USER