Explorar el Código

Fix for HADOOP-60, with help from Owen & Michael.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@382465 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting hace 19 años
padre
commit
9ef99db0ed
Se han modificado 4 ficheros con 32 adiciones y 14 borrados
  1. 11 6
      bin/hadoop
  2. 6 2
      bin/hadoop-daemon.sh
  3. 13 4
      bin/slaves.sh
  4. 2 2
      conf/hadoop-env.sh.template

+ 11 - 6
bin/hadoop

@@ -4,13 +4,15 @@
 #
 # Environment Variables
 #
-#   JAVA_HOME The java implementation to use.  Overrides JAVA_HOME.
+#   JAVA_HOME        The java implementation to use.  Overrides JAVA_HOME.
 #
 #   HADOOP_HEAPSIZE  The maximum amount of heap to use, in MB. 
-#                   Default is 1000.
+#                    Default is 1000.
 #
 #   HADOOP_OPTS      Extra Java runtime options.
 #
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
+#
 
 # resolve links - $0 may be a softlink
 THIS="$0"
@@ -49,8 +51,11 @@ shift
 THIS_DIR=`dirname "$THIS"`
 HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
 
-if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source "${HADOOP_HOME}/conf/hadoop-env.sh"
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  source ${HADOOP_CONF_DIR}/hadoop-env.sh
 fi
 
 # some Java parameters
@@ -74,8 +79,8 @@ if [ "$HADOOP_HEAPSIZE" != "" ]; then
   #echo $JAVA_HEAP_MAX
 fi
 
-# CLASSPATH initially contains $HADOOP_CONF_DIR, or defaults to $HADOOP_HOME/conf
-CLASSPATH=${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}
+# CLASSPATH initially contains $HADOOP_CONF_DIR
+CLASSPATH="${HADOOP_CONF_DIR}"
 CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
 
 # for developers, add Hadoop classes to CLASSPATH

+ 6 - 2
bin/hadoop-daemon.sh

@@ -4,6 +4,7 @@
 #
 # Environment Variables
 #
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #   HADOOP_LOG_DIR   Where log files are stored.  PWD by default.
 #   HADOOP_MASTER    host:path where hadoop code should be rsync'd from
 #   HADOOP_PID_DIR   The pid files are stored. /tmp by default.
@@ -39,8 +40,11 @@ done
 # the root of the Hadoop installation
 HADOOP_HOME=`dirname "$this"`/..
 
-if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source "${HADOOP_HOME}/conf/hadoop-env.sh"
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
 # get log directory

+ 13 - 4
bin/slaves.sh

@@ -5,6 +5,7 @@
 # Environment Variables
 #
 #   HADOOP_SLAVES    File naming remote hosts.  Default is ~/.slaves
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 ##
 
 usage="Usage: slaves.sh command..."
@@ -30,16 +31,24 @@ done
 # the root of the Hadoop installation
 HADOOP_HOME=`dirname "$this"`/..
 
-if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source "${HADOOP_HOME}/conf/hadoop-env.sh"
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}"
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
 if [ "$HADOOP_SLAVES" = "" ]; then
-  export HADOOP_SLAVES="$HADOOP_HOME/conf/slaves"
+  export HADOOP_SLAVES="${HADOOP_CONF_DIR}/slaves"
 fi
 
+# By default, forward HADOOP_CONF_DIR environment variable to the
+# remote slave. Remote slave must have following added to its
+# /etc/ssh/sshd_config:
+#   AcceptEnv HADOOP_CONF_DIR
+# See 'man ssh_config' for more on SendEnv and AcceptEnv.
 if [ "$HADOOP_SSH_OPTS" = "" ]; then
-  export HADOOP_SSH_OPTS="-o ConnectTimeout=1"
+  export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
 fi
 
 for slave in `cat "$HADOOP_SLAVES"`; do

+ 2 - 2
conf/hadoop-env.sh.template

@@ -14,8 +14,8 @@
 # Extra Java runtime options.  Empty by default.
 # export HADOOP_OPTS=-server
 
-# Extra ssh options.  Default is '-o ConnectTimeout=1'.
-# export HADOOP_SSH_OPTS="-o ConnectTimeout=1"
+# Extra ssh options.  Default: '-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR'.
+# export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
 
 # Where log files are stored.  $HADOOP_HOME/logs by default.
 # export HADOOP_LOG_DIR=${HADOOP_HOME}/logs