
Fix HADOOP-36. Scripts now source conf/hadoop-env.sh, to facilitate setting of environment variables, even on remote hosts. The default slaves file has moved from ~/.slaves to conf/slaves.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@378078 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 19 years ago
parent commit 510f87ef59
5 changed files with 58 additions and 5 deletions
  1. bin/hadoop (+4 -0)
  2. bin/hadoop-daemon.sh (+8 -4)
  3. bin/slaves.sh (+20 -1)
  4. conf/hadoop-env.sh.template (+25 -0)
  5. conf/slaves.template (+1 -0)

+ 4 - 0
bin/hadoop

@@ -49,6 +49,10 @@ shift
 THIS_DIR=`dirname "$THIS"`
 HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
 
+if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
+  source ${HADOOP_HOME}/conf/hadoop-env.sh
+fi
+
 # some Java parameters
 if [ "$JAVA_HOME" != "" ]; then
   #echo "run java in $JAVA_HOME"
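
The new guard means bin/hadoop behaves exactly as before when conf/hadoop-env.sh does not exist, and picks up any exports from it when it does. A minimal sketch of enabling it, assuming the template added in this commit is simply copied into place (the JAVA_HOME path is a placeholder):

  cp conf/hadoop-env.sh.template conf/hadoop-env.sh
  echo 'export JAVA_HOME=/usr/local/java' >> conf/hadoop-env.sh   # placeholder path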

+ 8 - 4
bin/hadoop-daemon.sh

@@ -37,11 +37,15 @@ while [ -h "$this" ]; do
 done
 
 # the root of the Hadoop installation
-root=`dirname $this`/..
+HADOOP_HOME=`dirname $this`/..
+
+if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
+  source ${HADOOP_HOME}/conf/hadoop-env.sh
+fi
 
 # get log directory
 if [ "$HADOOP_LOG_DIR" = "" ]; then
-  HADOOP_LOG_DIR=$root/logs
+  HADOOP_LOG_DIR=$HADOOP_HOME/logs
   mkdir -p $HADOOP_LOG_DIR
 fi
 
@@ -70,10 +74,10 @@ case $startStop in
 
     if [ "$HADOOP_MASTER" != "" ]; then
       echo rsync from $HADOOP_MASTER
-      rsync -a --delete --exclude=.svn $HADOOP_MASTER/ $root
+      rsync -a --delete --exclude=.svn $HADOOP_MASTER/ $HADOOP_HOME
     fi
 
-    cd $root
+    cd $HADOOP_HOME
     echo starting $command, logging to $log
     nohup bin/hadoop $command "$@" >& $log < /dev/null &
     echo $! > $pid
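
Because hadoop-env.sh is sourced before the log-directory and rsync logic, HADOOP_MASTER can now be set once per host instead of in each login shell. A minimal sketch, reusing the host:path example from the template added below (host and path are placeholders):

  # in conf/hadoop-env.sh on each slave (host and path are placeholders)
  export HADOOP_MASTER=master:/home/$USER/src/hadoop

With that set, every hadoop-daemon.sh start first rsyncs the installation from the master and then launches the daemon from the refreshed $HADOOP_HOME.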

+ 20 - 1
bin/slaves.sh

@@ -15,8 +15,27 @@ if [ $# -le 0 ]; then
   exit 1
 fi
 
+# resolve links - $0 may be a softlink
+this="$0"
+while [ -h "$this" ]; do
+  ls=`ls -ld "$this"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    this="$link"
+  else
+    this=`dirname "$this"`/"$link"
+  fi
+done
+
+# the root of the Hadoop installation
+HADOOP_HOME=`dirname $this`/..
+
+if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
+  source ${HADOOP_HOME}/conf/hadoop-env.sh
+fi
+
 if [ "$HADOOP_SLAVES" = "" ]; then
-  export HADOOP_SLAVES=$HOME/.slaves
+  export HADOOP_SLAVES=$HADOOP_HOME/conf/slaves
 fi
 
 for slave in `cat $HADOOP_SLAVES`; do
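
The loop body is outside this hunk, so the sketch below only illustrates the pattern such a slaves script typically follows (run the given command on every listed host over ssh); it is not the actual file contents:

  # hypothetical loop body, for illustration only
  for slave in `cat "$HADOOP_SLAVES"`; do
    ssh "$slave" "$@" &
  done
  wait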

+ 25 - 0
conf/hadoop-env.sh.template

@@ -0,0 +1,25 @@
+# Set Hadoop-specific environment variables here.
+
+# The java implementation to use.
+# export JAVA_HOME=/usr/bin/java
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HADOOP_HEAPSIZE=2000
+
+# Extra Java runtime options.  Empty by default.
+# export HADOOP_OPTS=-server
+
+# Where log files are stored.  $HADOOP_HOME/logs by default.
+# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
+
+# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+# host:path where hadoop code should be rsync'd from.  Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+# The directory where pid files are stored. /tmp by default.
+# export HADOOP_PID_DIR=/var/hadoop/pids
+
+# A string representing this instance of hadoop. $USER by default.
+# export HADOOP_IDENT_STRING=$USER
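
Every entry ships commented out, so the template has no effect until it is copied to conf/hadoop-env.sh and individual lines are uncommented. For example, to move logs and pid files off the defaults (both paths are placeholders):

  export HADOOP_LOG_DIR=/var/log/hadoop
  export HADOOP_PID_DIR=/var/hadoop/pids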

+ 1 - 0
conf/slaves.template

@@ -0,0 +1 @@
+localhost
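
The template seeds a single-host setup; a multi-node cluster would list one worker host per line in conf/slaves. The hostnames below are placeholders:

  node01.example.com
  node02.example.com
  node03.example.com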