فهرست منبع

HADOOP-260. Add --config option to shell scripts. Contributed by Milind.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@424973 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 19 سال پیش
والد
کامیت
08036d675f
13 فایل‌های تغییر یافته به همراه 82 افزوده شده و 88 حذف شده
  1. 3 0
      CHANGES.txt
  2. 5 19
      bin/hadoop
  3. 33 0
      bin/hadoop-config.sh
  4. 7 20
      bin/hadoop-daemon.sh
  5. 3 3
      bin/hadoop-daemons.sh
  6. 3 17
      bin/rcc
  7. 4 17
      bin/slaves.sh
  8. 4 2
      bin/start-all.sh
  9. 4 2
      bin/start-dfs.sh
  10. 4 2
      bin/start-mapred.sh
  11. 4 2
      bin/stop-all.sh
  12. 4 2
      bin/stop-dfs.sh
  13. 4 2
      bin/stop-mapred.sh

+ 3 - 0
CHANGES.txt

@@ -66,6 +66,9 @@ Trunk (unreleased changes)
 18. HADOOP-376.  Fix datanode's HTTP server to scan for a free port.
     (omalley via cutting)
 
+19. HADOOP-260.  Add --config option to shell scripts, specifying an
+    alternate configuration directory. (Milind Bhandarkar via cutting)
+
 
 Release 0.4.0 - 2006-06-28
 

+ 5 - 19
bin/hadoop

@@ -16,21 +16,14 @@
 #   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
 #
 
-# resolve links - $0 may be a softlink
-THIS="$0"
-while [ -h "$THIS" ]; do
-  ls=`ls -ld "$THIS"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    THIS="$link"
-  else
-    THIS=`dirname "$THIS"`/"$link"
-  fi
-done
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+source "$bin"/hadoop-config.sh
 
 # if no args specified, show usage
 if [ $# = 0 ]; then
-  echo "Usage: hadoop COMMAND"
+  echo "Usage: hadoop [--config confdir] COMMAND"
   echo "where COMMAND is one of:"
   echo "  namenode -format  format the DFS filesystem"
   echo "  namenode          run the DFS namenode"
@@ -52,13 +45,6 @@ fi
 COMMAND=$1
 shift
 
-# some directories
-THIS_DIR=`dirname "$THIS"`
-export HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
-
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
-
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi

+ 33 - 0
bin/hadoop-config.sh

@@ -0,0 +1,33 @@
+# included in all the hadoop scripts with source command
+# should not be executable directly
+# also should not be passed any arguments, since we need original $*
+
+# resolve links - $0 may be a softlink
+this="$0"
+while [ -h "$this" ]; do
+  ls=`ls -ld "$this"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    this="$link"
+  else
+    this=`dirname "$this"`/"$link"
+  fi
+done
+
+# the root of the Hadoop installation
+export HADOOP_HOME=`dirname "$this"`/..
+
+#check to see if the conf dir is given as an optional argument
+if [ $# -gt 1 ]
+then
+    if [ "--config" = "$1" ]
+	  then
+	      shift
+	      confdir=$1
+	      shift
+	      HADOOP_CONF_DIR=$confdir
+    fi
+fi
+ 
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"

+ 7 - 20
bin/hadoop-daemon.sh

@@ -11,7 +11,7 @@
 #   HADOOP_IDENT_STRING   A string representing this instance of hadoop. $USER by default
 ##
 
-usage="Usage: hadoop-daemon [start|stop] [hadoop-command] [args...]"
+usage="Usage: hadoop-daemon.sh [--config <conf-dir>] (start|stop) <hadoop-command> <args...>"
 
 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -19,30 +19,17 @@ if [ $# -le 1 ]; then
   exit 1
 fi
 
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+source "$bin"/hadoop-config.sh
+
 # get arguments
 startStop=$1
 shift
 command=$1
 shift
 
-# resolve links - $0 may be a softlink
-this="$0"
-while [ -h "$this" ]; do
-  ls=`ls -ld "$this"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    this="$link"
-  else
-    this=`dirname "$this"`/"$link"
-  fi
-done
-
-# the root of the Hadoop installation
-export HADOOP_HOME=`dirname "$this"`/..
-
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
-
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
@@ -84,7 +71,7 @@ case $startStop in
     fi
 
     echo starting $command, logging to $log
-    nohup "$HADOOP_HOME"/bin/hadoop $command "$@" > "$log" 2>&1 < /dev/null &
+    nohup "$HADOOP_HOME"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
     echo $! > $pid
     sleep 1; head "$log"
     ;;

+ 3 - 3
bin/hadoop-daemons.sh

@@ -2,7 +2,7 @@
 # 
 # Run a Hadoop command on all slave hosts.
 
-usage="Usage: hadoop-daemons.sh [start|stop] command args..."
+usage="Usage: hadoop-daemons.sh [--config confdir] [start|stop] command args..."
 
 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -13,6 +13,6 @@ fi
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-HADOOP_HOME="$bin/.."
+source $bin/hadoop-config.sh
 
-exec "$bin/slaves.sh" cd "$HADOOP_HOME" \; "$bin/hadoop-daemon.sh" "$@"
+exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_HOME" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"

+ 3 - 17
bin/rcc

@@ -11,24 +11,10 @@
 #   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #
 
-# resolve links - $0 may be a softlink
-THIS="$0"
-while [ -h "$THIS" ]; do
-  ls=`ls -ld "$THIS"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    THIS="$link"
-  else
-    THIS=`dirname "$THIS"`/"$link"
-  fi
-done
-
-# some directories
-THIS_DIR=`dirname "$THIS"`
-HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+source "$bin"/hadoop-config.sh
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"

+ 4 - 17
bin/slaves.sh

@@ -11,7 +11,7 @@
 #   HADOOP_SSH_OPTS Options passed to ssh when running remote commands.
 ##
 
-usage="Usage: slaves.sh command..."
+usage="Usage: slaves.sh [--config confdir] command..."
 
 # if no args specified, show usage
 if [ $# -le 0 ]; then
@@ -19,23 +19,10 @@ if [ $# -le 0 ]; then
   exit 1
 fi
 
-# resolve links - $0 may be a softlink
-this="$0"
-while [ -h "$this" ]; do
-  ls=`ls -ld "$this"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    this="$link"
-  else
-    this=`dirname "$this"`/"$link"
-  fi
-done
-
-# the root of the Hadoop installation
-HADOOP_HOME=`dirname "$this"`/..
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}"
+source "$bin"/hadoop-config.sh
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"

+ 4 - 2
bin/start-all.sh

@@ -5,8 +5,10 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+source "$bin"/hadoop-config.sh
+
 # start dfs daemons
-"$bin"/start-dfs.sh
+"$bin"/start-dfs.sh --config $HADOOP_CONF_DIR
 
 # start mapred daemons
-"$bin"/start-mapred.sh
+"$bin"/start-mapred.sh --config $HADOOP_CONF_DIR

+ 4 - 2
bin/start-dfs.sh

@@ -5,8 +5,10 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+source "$bin"/hadoop-config.sh
+
 # start dfs daemons
 # start namenode after datanodes, to minimize time namenode is up w/o data
 # note: datanodes will log connection errors until namenode starts
-"$bin"/hadoop-daemon.sh start namenode
-"$bin"/hadoop-daemons.sh start datanode
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start namenode
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start datanode

+ 4 - 2
bin/start-mapred.sh

@@ -5,7 +5,9 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+source "$bin"/hadoop-config.sh
+
 # start mapred daemons
 # start jobtracker first to minimize connection errors at startup
-"$bin"/hadoop-daemon.sh start jobtracker
-"$bin"/hadoop-daemons.sh start tasktracker
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start jobtracker
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start tasktracker

+ 4 - 2
bin/stop-all.sh

@@ -5,5 +5,7 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-"$bin"/stop-mapred.sh
-"$bin"/stop-dfs.sh
+source "$bin"/hadoop-config.sh
+
+"$bin"/stop-mapred.sh --config $HADOOP_CONF_DIR
+"$bin"/stop-dfs.sh --config $HADOOP_CONF_DIR

+ 4 - 2
bin/stop-dfs.sh

@@ -5,6 +5,8 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-"$bin"/hadoop-daemon.sh stop namenode
-"$bin"/hadoop-daemons.sh stop datanode
+source "$bin"/hadoop-config.sh
+
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop namenode
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop datanode
 

+ 4 - 2
bin/stop-mapred.sh

@@ -5,6 +5,8 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-"$bin"/hadoop-daemon.sh stop jobtracker
-"$bin"/hadoop-daemons.sh stop tasktracker
+source "$bin"/hadoop-config.sh
+
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop jobtracker
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop tasktracker