
Merge -r 1170232:1170233 from trunk to branch. Fixes: HDFS-2323.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1170241 13f79535-47bb-0310-9956-ffa450edef68
Thomas White, 13 years ago
commit 3f6ae51a7a

+ 1 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -1007,6 +1007,7 @@ Release 0.23.0 - Unreleased
 
     HDFS-2314. MRV1 test compilation broken after HDFS-2197 (todd)
 
+    HDFS-2323. start-dfs.sh script fails for tarball install (tomwhite)
 
   BREAKDOWN OF HDFS-1073 SUBTASKS
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh

@@ -51,7 +51,7 @@ NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
 
 echo "Starting namenodes on [$NAMENODES]"
 
-"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
   --config "$HADOOP_CONF_DIR" \
   --hostnames "$NAMENODES" \
   --script "$bin/hdfs" start namenode $nameStartOpt
@@ -64,7 +64,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
     "Attempting to start secure cluster, skipping datanodes. " \
     "Run start-secure-dns.sh as root to complete startup."
 else
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --script "$bin/hdfs" start datanode $dataStartOpt
 fi
@@ -84,7 +84,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
 else
   echo "Starting secondary namenodes [$SECONDARY_NAMENODES]"
 
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --hostnames "$SECONDARY_NAMENODES" \
     --script "$bin/hdfs" start secondarynamenode
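For orientation, the same bin-to-sbin correction recurs in stop-dfs.sh and the secure-datanode wrappers below: in the tarball layout, user-facing commands such as hdfs stay under bin/ while the daemon-control helpers live under sbin/, so resolving hadoop-daemons.sh under bin/ fails on a tarball install. A minimal sketch of the layout these scripts now assume (illustrative paths only, not an exhaustive listing):

  $HADOOP_PREFIX/
    bin/hdfs                 # user-facing CLI, still reached via "$bin/hdfs"
    sbin/start-dfs.sh
    sbin/stop-dfs.sh
    sbin/hadoop-daemons.sh   # daemon helper, now resolved under sbin/
    libexec/hdfs-config.sh

  # Quick sanity check before starting daemons (assumes HADOOP_PREFIX is set):
  [ -x "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" ] || \
    echo "hadoop-daemons.sh not found under sbin/; start-dfs.sh would fail" >&2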

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh

@@ -25,7 +25,7 @@ bin=`cd "$bin"; pwd`
 . "$bin"/../libexec/hdfs-config.sh
 
 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
-  "$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
+  "$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
 else
   echo $usage
 fi

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh

@@ -27,7 +27,7 @@ NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
 
 echo "Stopping namenodes on [$NAMENODES]"
 
-"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
   --config "$HADOOP_CONF_DIR" \
   --hostnames "$NAMENODES" \
   --script "$bin/hdfs" stop namenode
@@ -40,7 +40,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
     "Attempting to stop secure cluster, skipping datanodes. " \
     "Run stop-secure-dns.sh as root to complete shutdown."
 else
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --script "$bin/hdfs" stop datanode
 fi
@@ -60,7 +60,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
 else
   echo "Stopping secondary namenodes [$SECONDARY_NAMENODES]"
 
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --hostnames "$SECONDARY_NAMENODES" \
     --script "$bin/hdfs" stop secondarynamenode

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh

@@ -25,7 +25,7 @@ bin=`cd "$bin"; pwd`
 . "$bin"/../libexec/hdfs-config.sh
 
 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
-  "$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode
+  "$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode
 else
   echo $usage
 fi

+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java

@@ -227,7 +227,10 @@ public class GetConf extends Configured implements Tool {
   void printList(List<InetSocketAddress> list) {
     StringBuilder buffer = new StringBuilder();
     for (InetSocketAddress address : list) {
-      buffer.append(address.getHostName()).append(" ");
+      if (buffer.length() > 0) {
+        buffer.append(" ");
+      }
+      buffer.append(address.getHostName());
     }
     printOut(buffer.toString());
   }
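The printList change matters to the scripts above because hdfs getconf -namenodes is what populates $NAMENODES in start-dfs.sh and stop-dfs.sh; the old loop appended a separator after every entry, so the output carried a trailing space. A rough before/after of how that output lands in the scripts (hostnames are made up for illustration):

  # old output: trailing space after the last hostname
  NAMENODES="nn1.example.com nn2.example.com "
  echo "Starting namenodes on [$NAMENODES]"   # -> [nn1.example.com nn2.example.com ]

  # new output: separator only between entries
  NAMENODES="nn1.example.com nn2.example.com"
  echo "Starting namenodes on [$NAMENODES]"   # -> [nn1.example.com nn2.example.com]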