Browse Source

Escape paths so that spaces are permitted (as is common on Windows).

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@381793 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 19 years ago
Parent
Current commit
e3970330a9
6 changed files with 28 additions and 28 deletions
  1. 1 1
      bin/hadoop
  2. 7 7
      bin/hadoop-daemon.sh
  3. 3 3
      bin/hadoop-daemons.sh
  4. 5 5
      bin/slaves.sh
  5. 6 6
      bin/start-all.sh
  6. 6 6
      bin/stop-all.sh

+ 1 - 1
bin/hadoop

@@ -50,7 +50,7 @@ THIS_DIR=`dirname "$THIS"`
 HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
 
 if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source ${HADOOP_HOME}/conf/hadoop-env.sh
+  source "${HADOOP_HOME}/conf/hadoop-env.sh"
 fi
 
 # some Java parameters

+ 7 - 7
bin/hadoop-daemon.sh

@@ -37,16 +37,16 @@ while [ -h "$this" ]; do
 done
 
 # the root of the Hadoop installation
-HADOOP_HOME=`dirname $this`/..
+HADOOP_HOME=`dirname "$this"`/..
 
 if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source ${HADOOP_HOME}/conf/hadoop-env.sh
+  source "${HADOOP_HOME}/conf/hadoop-env.sh"
 fi
 
 # get log directory
 if [ "$HADOOP_LOG_DIR" = "" ]; then
-  HADOOP_LOG_DIR=$HADOOP_HOME/logs
-  mkdir -p $HADOOP_LOG_DIR
+  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+  mkdir -p "$HADOOP_LOG_DIR"
 fi
 
 if [ "$HADOOP_PID_DIR" = "" ]; then
@@ -77,11 +77,11 @@ case $startStop in
       rsync -a --delete --exclude=.svn $HADOOP_MASTER/ $HADOOP_HOME
     fi
 
-    cd $HADOOP_HOME
+    cd "$HADOOP_HOME"
     echo starting $command, logging to $log
-    nohup bin/hadoop $command "$@" >& $log < /dev/null &
+    nohup bin/hadoop $command "$@" >& "$log" < /dev/null &
     echo $! > $pid
-    sleep 1; head $log
+    sleep 1; head "$log"
     ;;
           
   (stop)

+ 3 - 3
bin/hadoop-daemons.sh

@@ -10,7 +10,7 @@ if [ $# -le 1 ]; then
   exit 1
 fi
 
-bin=`dirname $0`
-bin=`cd $bin; pwd`
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-exec $bin/slaves.sh $bin/hadoop-daemon.sh "$@"
+exec "$bin/slaves.sh" "$bin/hadoop-daemon.sh" "$@"

+ 5 - 5
bin/slaves.sh

@@ -28,18 +28,18 @@ while [ -h "$this" ]; do
 done
 
 # the root of the Hadoop installation
-HADOOP_HOME=`dirname $this`/..
+HADOOP_HOME=`dirname "$this"`/..
 
 if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source ${HADOOP_HOME}/conf/hadoop-env.sh
+  source "${HADOOP_HOME}/conf/hadoop-env.sh"
 fi
 
 if [ "$HADOOP_SLAVES" = "" ]; then
-  export HADOOP_SLAVES=$HADOOP_HOME/conf/slaves
+  export HADOOP_SLAVES="$HADOOP_HOME/conf/slaves"
 fi
 
-for slave in `cat $HADOOP_SLAVES`; do
- ssh -o ConnectTimeout=1 $slave "$@" \
+for slave in `cat "$HADOOP_SLAVES"`; do
+ ssh -o ConnectTimeout=1 $slave $"${@// /\\ }" \
    2>&1 | sed "s/^/$slave: /" &
 done
 

+ 6 - 6
bin/start-all.sh

@@ -2,10 +2,10 @@
 
 # Start all hadoop daemons.  Run this on master node.
 
-bin=`dirname $0`
-bin=`cd $bin; pwd`
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-$bin/hadoop-daemons.sh start datanode
-$bin/hadoop-daemon.sh start namenode
-$bin/hadoop-daemon.sh start jobtracker
-$bin/hadoop-daemons.sh start tasktracker
+"$bin"/hadoop-daemons.sh start datanode
+"$bin"/hadoop-daemon.sh start namenode
+"$bin"/hadoop-daemon.sh start jobtracker
+"$bin"/hadoop-daemons.sh start tasktracker

+ 6 - 6
bin/stop-all.sh

@@ -2,10 +2,10 @@
 
 # Stop all hadoop daemons.  Run this on master node.
 
-bin=`dirname $0`
-bin=`cd $bin; pwd`
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-$bin/hadoop-daemon.sh stop jobtracker
-$bin/hadoop-daemons.sh stop tasktracker
-$bin/hadoop-daemon.sh stop namenode
-$bin/hadoop-daemons.sh stop datanode
+"$bin"/hadoop-daemon.sh stop jobtracker
+"$bin"/hadoop-daemons.sh stop tasktracker
+"$bin"/hadoop-daemon.sh stop namenode
+"$bin"/hadoop-daemons.sh stop datanode