Browse Source

HADOOP-4868. Splits the hadoop script into three parts - bin/hadoop, bin/mapred and bin/hdfs. Contributed by Sharad Agarwal.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@742937 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das 16 years ago
parent
commit
ea95b2113c
16 changed files with 555 additions and 279 deletions
  1. 3 0
      CHANGES.txt
  2. 66 251
      bin/hadoop
  3. 181 0
      bin/hadoop-config.sh
  4. 4 2
      bin/hadoop-daemon.sh
  5. 99 0
      bin/hdfs
  6. 33 0
      bin/hdfs-config.sh
  7. 96 0
      bin/mapred
  8. 33 0
      bin/mapred-config.sh
  9. 10 4
      bin/start-all.sh
  10. 2 2
      bin/start-balancer.sh
  11. 4 4
      bin/start-dfs.sh
  12. 3 3
      bin/start-mapred.sh
  13. 12 2
      bin/stop-all.sh
  14. 2 2
      bin/stop-balancer.sh
  15. 4 5
      bin/stop-dfs.sh
  16. 3 4
      bin/stop-mapred.sh

+ 3 - 0
CHANGES.txt

@@ -92,6 +92,9 @@ Trunk (unreleased changes)
     tools, and example jars. Let findbugs depend on this rather than the 'tar'
     target. (Giridharan Kesavan via cdouglas)
 
+    HADOOP-4868. Splits the hadoop script into three parts - bin/hadoop, bin/mapred and 
+    bin/hdfs. (Sharad Agarwal via ddas)
+
   OPTIMIZATIONS
 
   BUG FIXES

+ 66 - 251
bin/hadoop

@@ -15,60 +15,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-# The Hadoop command script
-#
-# Environment Variables
-#
-#   JAVA_HOME        The java implementation to use.  Overrides JAVA_HOME.
-#
-#   HADOOP_CLASSPATH Extra Java CLASSPATH entries.
-#
-#   HADOOP_HEAPSIZE  The maximum amount of heap to use, in MB. 
-#                    Default is 1000.
-#
-#   HADOOP_OPTS      Extra Java runtime options.
-#   
-#   HADOOP_NAMENODE_OPTS       These options are added to HADOOP_OPTS 
-#   HADOOP_CLIENT_OPTS         when the respective command is run.
-#   HADOOP_{COMMAND}_OPTS etc  HADOOP_JT_OPTS applies to JobTracker 
-#                              for e.g.  HADOOP_CLIENT_OPTS applies to 
-#                              more than one command (fs, dfs, fsck, 
-#                              dfsadmin etc)  
-#
-#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
-#
-#   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
-#
+# This script runs the hadoop core commands. 
 
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
-
+ 
 . "$bin"/hadoop-config.sh
 
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
-# if no args specified, show usage
-if [ $# = 0 ]; then
+function print_usage(){
  echo "Usage: hadoop [--config confdir] COMMAND"
-  echo "where COMMAND is one of:"
-  echo "  namenode -format     format the DFS filesystem"
-  echo "  secondarynamenode    run the DFS secondary namenode"
-  echo "  namenode             run the DFS namenode"
-  echo "  datanode             run a DFS datanode"
-  echo "  dfsadmin             run a DFS admin client"
-  echo "  mradmin              run a Map-Reduce admin client"
-  echo "  fsck                 run a DFS filesystem checking utility"
+  echo "       where COMMAND is one of:"
  echo "  fs                   run a generic filesystem user client"
-  echo "  balancer             run a cluster balancing utility"
-  echo "  jobtracker           run the MapReduce job Tracker node" 
-  echo "  pipes                run a Pipes job"
-  echo "  tasktracker          run a MapReduce task Tracker node" 
-  echo "  job                  manipulate MapReduce jobs"
-  echo "  queue                get information regarding JobQueues" 
  echo "  version              print the version"
  echo "  jar <jar>            run a jar file"
  echo "  distcp <srcurl> <desturl> copy file or directories recursively"
@@ -76,214 +33,72 @@ if [ $# = 0 ]; then
  echo "  daemonlog            get/set the log level for each daemon"
  echo " or"
  echo "  CLASSNAME            run the class named CLASSNAME"
+  echo ""
  echo "Most commands print help when invoked w/o parameters."
-  exit 1
-fi
-
-# get arguments
-COMMAND=$1
-shift
-
-if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
-  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
-fi
-
-# some Java parameters
-if [ "$JAVA_HOME" != "" ]; then
-  #echo "run java in $JAVA_HOME"
-  JAVA_HOME=$JAVA_HOME
-fi
-  
-if [ "$JAVA_HOME" = "" ]; then
-  echo "Error: JAVA_HOME is not set."
-  exit 1
-fi
-
-JAVA=$JAVA_HOME/bin/java
-JAVA_HEAP_MAX=-Xmx1000m 
-
-# check envvars which might override default args
-if [ "$HADOOP_HEAPSIZE" != "" ]; then
-  #echo "run with heapsize $HADOOP_HEAPSIZE"
-  JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
-  #echo $JAVA_HEAP_MAX
-fi
-
-# CLASSPATH initially contains $HADOOP_CONF_DIR
-CLASSPATH="${HADOOP_CONF_DIR}"
-CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
-
-# for developers, add Hadoop classes to CLASSPATH
-if [ -d "$HADOOP_HOME/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
-fi
-if [ -d "$HADOOP_HOME/build/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
-fi
-if [ -d "$HADOOP_HOME/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
-fi
-if [ -d "$HADOOP_HOME/build/tools" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/tools
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=
-
-# for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_HOME/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME
-fi
-for f in $HADOOP_HOME/hadoop-*-core.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-for f in $HADOOP_HOME/lib/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
-for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-fi
-
-for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
+}
 
 
-for f in $HADOOP_HOME/hadoop-*-tools.jar; do
-  TOOL_PATH=${TOOL_PATH}:$f;
-done
-for f in $HADOOP_HOME/build/hadoop-*-tools.jar; do
-  TOOL_PATH=${TOOL_PATH}:$f;
-done
-
-# add user-specified CLASSPATH last
-if [ "$HADOOP_CLASSPATH" != "" ]; then
-  CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
-fi
-
-# default log directory & file
-if [ "$HADOOP_LOG_DIR" = "" ]; then
-  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
-fi
-if [ "$HADOOP_LOGFILE" = "" ]; then
-  HADOOP_LOGFILE='hadoop.log'
-fi
-
-# default policy file for service-level authorization
-if [ "$HADOOP_POLICYFILE" = "" ]; then
-  HADOOP_POLICYFILE="hadoop-policy.xml"
-fi
-
-# restore ordinary behaviour
-unset IFS
-
-# figure out which class to run
-if [ "$COMMAND" = "namenode" ] ; then
-  CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
-elif [ "$COMMAND" = "secondarynamenode" ] ; then
-  CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
-elif [ "$COMMAND" = "datanode" ] ; then
-  CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
-elif [ "$COMMAND" = "fs" ] ; then
-  CLASS=org.apache.hadoop.fs.FsShell
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "dfs" ] ; then
-  CLASS=org.apache.hadoop.fs.FsShell
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "dfsadmin" ] ; then
-  CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "mradmin" ] ; then
-  CLASS=org.apache.hadoop.mapred.tools.MRAdmin
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "fsck" ] ; then
-  CLASS=org.apache.hadoop.hdfs.tools.DFSck
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "balancer" ] ; then
-  CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
-elif [ "$COMMAND" = "jobtracker" ] ; then
-  CLASS=org.apache.hadoop.mapred.JobTracker
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
-elif [ "$COMMAND" = "tasktracker" ] ; then
-  CLASS=org.apache.hadoop.mapred.TaskTracker
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
-elif [ "$COMMAND" = "job" ] ; then
-  CLASS=org.apache.hadoop.mapred.JobClient
-elif [ "$COMMAND" = "queue" ] ; then
-  CLASS=org.apache.hadoop.mapred.JobQueueClient
-elif [ "$COMMAND" = "pipes" ] ; then
-  CLASS=org.apache.hadoop.mapred.pipes.Submitter
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "version" ] ; then
-  CLASS=org.apache.hadoop.util.VersionInfo
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "jar" ] ; then
-  CLASS=org.apache.hadoop.util.RunJar
-elif [ "$COMMAND" = "distcp" ] ; then
-  CLASS=org.apache.hadoop.tools.DistCp
-  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "daemonlog" ] ; then
-  CLASS=org.apache.hadoop.log.LogLevel
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "archive" ] ; then
-  CLASS=org.apache.hadoop.tools.HadoopArchives
-  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "sampler" ] ; then
-  CLASS=org.apache.hadoop.mapred.lib.InputSampler
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-else
-  CLASS=$COMMAND
+if [ $# = 0 ]; then
+  print_usage
+  exit
 fi
 fi
 
 
-# cygwin path translation
-if $cygwin; then
-  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-  HADOOP_HOME=`cygpath -w "$HADOOP_HOME"`
-  HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
-  TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
-fi
-# setup 'java.library.path' for native-hadoop code if necessary
-JAVA_LIBRARY_PATH=''
-if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
-  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
-  
-  if [ -d "$HADOOP_HOME/build/native" ]; then
-    JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
-  fi
-  
-  if [ -d "${HADOOP_HOME}/lib/native" ]; then
-    if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
+COMMAND=$1
+case $COMMAND in
+  #hdfs commands
+  namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer)
+    echo "DEPRECATED: Use of this script to execute hdfs command is deprecated."
+    echo "Instead use the hdfs command for it."
+    echo ""
+    #try to locate hdfs and if present, delegate to it.  
+    if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
+      "${HADOOP_HDFS_HOME}"/bin/hdfs $*  
     else
     else
-      JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
+      echo "HDFS not found."
+      exit
     fi
     fi
-  fi
-fi
-
-# cygwin path translation
-if $cygwin; then
-  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
-
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
-if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-  HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
-fi  
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
+    ;;
+
+  #mapred commands  
+  mradmin|jobtracker|tasktracker|pipes|job|queue)
+    echo "DEPRECATED: Use of this script to execute mapred command is deprecated."
+    echo "Instead use the mapred command for it."
+    echo ""
+    #try to locate mapred and if present, delegate to it.
+    if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
+      "${HADOOP_MAPRED_HOME}"/bin/mapred $*  
+    else
+      echo "MAPRED not found."
+      exit
+    fi
+    ;;
+
+  #core commands  
+  *)
+    # the core commands
+    if [ "$COMMAND" = "fs" ] ; then
+      CLASS=org.apache.hadoop.fs.FsShell
+      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+    elif [ "$COMMAND" = "version" ] ; then
+      CLASS=org.apache.hadoop.util.VersionInfo
+      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+    elif [ "$COMMAND" = "jar" ] ; then
+      CLASS=org.apache.hadoop.util.RunJar
+    elif [ "$COMMAND" = "distcp" ] ; then
+      CLASS=org.apache.hadoop.tools.DistCp
+      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
+      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+    elif [ "$COMMAND" = "daemonlog" ] ; then
+      CLASS=org.apache.hadoop.log.LogLevel
+      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+    elif [ "$COMMAND" = "archive" ] ; then
+      CLASS=org.apache.hadoop.tools.HadoopArchives
+      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
+      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+    else
+      CLASS=$COMMAND
+    fi
+    shift
+    exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
+    ;;
 
 
-# run it
-exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
+esac

+ 181 - 0
bin/hadoop-config.sh

@@ -37,7 +37,11 @@ bin=`cd "$bin"; pwd`
 this="$bin/$script"
 
 # the root of the Hadoop installation
+#TODO: change the env variable when dir structure is changed
 export HADOOP_HOME=`dirname "$this"`/..
+export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$HADOOP_HOME}"
+#export HADOOP_HOME=`dirname "$this"`/../..
+#export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-`dirname "$this"`/..}"
 
 #check to see if the conf dir is given as an optional argument
 if [ $# -gt 1 ]
@@ -66,3 +70,180 @@ then
        export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
    fi
 fi
+
+cygwin=false
+case "`uname`" in
+CYGWIN*) cygwin=true;;
+esac
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
+fi
+
+# some Java parameters
+if [ "$JAVA_HOME" != "" ]; then
+  #echo "run java in $JAVA_HOME"
+  JAVA_HOME=$JAVA_HOME
+fi
+  
+if [ "$JAVA_HOME" = "" ]; then
+  echo "Error: JAVA_HOME is not set."
+  exit 1
+fi
+
+JAVA=$JAVA_HOME/bin/java
+JAVA_HEAP_MAX=-Xmx1000m 
+
+# check envvars which might override default args
+if [ "$HADOOP_HEAPSIZE" != "" ]; then
+  #echo "run with heapsize $HADOOP_HEAPSIZE"
+  JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
+  #echo $JAVA_HEAP_MAX
+fi
+
+# CLASSPATH initially contains $HADOOP_CONF_DIR
+CLASSPATH="${HADOOP_CONF_DIR}"
+CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
+
+# for developers, add Hadoop classes to CLASSPATH
+if [ -d "$HADOOP_CORE_HOME/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/classes
+fi
+if [ -d "$HADOOP_CORE_HOME/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build
+fi
+if [ -d "$HADOOP_CORE_HOME/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/test/classes
+fi
+if [ -d "$HADOOP_CORE_HOME/build/tools" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/tools
+fi
+
+# so that filenames w/ spaces are handled correctly in loops below
+IFS=
+
+# for releases, add core hadoop jar & webapps to CLASSPATH
+if [ -d "$HADOOP_CORE_HOME/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME
+fi
+for f in $HADOOP_CORE_HOME/hadoop-*-core.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HADOOP_CORE_HOME/lib/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+if [ -d "$HADOOP_CORE_HOME/build/ivy/lib/Hadoop/common" ]; then
+for f in $HADOOP_CORE_HOME/build/ivy/lib/Hadoop/common/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+fi
+
+for f in $HADOOP_CORE_HOME/lib/jsp-2.1/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+for f in $HADOOP_CORE_HOME/hadoop-*-tools.jar; do
+  TOOL_PATH=${TOOL_PATH}:$f;
+done
+for f in $HADOOP_CORE_HOME/build/hadoop-*-tools.jar; do
+  TOOL_PATH=${TOOL_PATH}:$f;
+done
+
+# add user-specified CLASSPATH last
+if [ "$HADOOP_CLASSPATH" != "" ]; then
+  CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
+fi
+
+# default log directory & file
+if [ "$HADOOP_LOG_DIR" = "" ]; then
+  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+fi
+if [ "$HADOOP_LOGFILE" = "" ]; then
+  HADOOP_LOGFILE='hadoop.log'
+fi
+
+# default policy file for service-level authorization
+if [ "$HADOOP_POLICYFILE" = "" ]; then
+  HADOOP_POLICYFILE="hadoop-policy.xml"
+fi
+
+# restore ordinary behaviour
+unset IFS
+
+# cygwin path translation
+if $cygwin; then
+  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+  HADOOP_CORE_HOME=`cygpath -w "$HADOOP_CORE_HOME"`
+  HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
+  TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
+fi
+# setup 'java.library.path' for native-hadoop code if necessary
+JAVA_LIBRARY_PATH=''
+if [ -d "${HADOOP_CORE_HOME}/build/native" -o -d "${HADOOP_CORE_HOME}/lib/native" ]; then
+  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
+  
+  if [ -d "$HADOOP_CORE_HOME/build/native" ]; then
+    JAVA_LIBRARY_PATH=${HADOOP_CORE_HOME}/build/native/${JAVA_PLATFORM}/lib
+  fi
+  
+  if [ -d "${HADOOP_CORE_HOME}/lib/native" ]; then
+    if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_CORE_HOME}/lib/native/${JAVA_PLATFORM}
+    else
+      JAVA_LIBRARY_PATH=${HADOOP_CORE_HOME}/lib/native/${JAVA_PLATFORM}
+    fi
+  fi
+fi
+
+# cygwin path translation
+if $cygwin; then
+  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
+fi
+
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_CORE_HOME"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
+if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+  HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
+fi  
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
+
+# put hdfs in classpath if present
+if [ "$HADOOP_HDFS_HOME" = "" ]; then
+  if [ -d "${HADOOP_HOME}/hdfs" ]; then
+    HADOOP_HDFS_HOME=$HADOOP_HOME/hdfs
+    echo Found HDFS installed at $HADOOP_HDFS_HOME
+  fi
+fi
+
+if [ -d "${HADOOP_HDFS_HOME}" ]; then
+  for f in $HADOOP_HDFS_HOME/hadoop-*-hdfs.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  # add libs to CLASSPATH
+  for f in $HADOOP_HDFS_HOME/lib/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+  
+  if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
+    CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
+  fi
+fi
+
+# set mapred home if mapred is present
+if [ "$HADOOP_MAPRED_HOME" = "" ]; then
+  if [ -d "${HADOOP_HOME}/mapred" ]; then
+    HADOOP_MAPRED_HOME=$HADOOP_HOME/mapred
+    echo Found MAPRED installed at $HADOOP_MAPRED_HOME
+  fi
+fi
+
+# TODO:remove this when dir structure is changed
+export HADOOP_HDFS_HOME=$HADOOP_HOME
+export HADOOP_MAPRED_HOME=$HADOOP_HOME

+ 4 - 2
bin/hadoop-daemon.sh

@@ -28,7 +28,7 @@
 #   HADOOP_NICENESS The scheduling priority for daemons. Defaults to 0.
 ##
 
-usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <hadoop-command> <args...>"
+usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] <hadoop-script> (start|stop) <hadoop-command> <args...>"
 
 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -42,6 +42,8 @@ bin=`cd "$bin"; pwd`
 . "$bin"/hadoop-config.sh
 
 # get arguments
+hadoopScript=$1
+shift
 startStop=$1
 shift
 command=$1
@@ -114,7 +116,7 @@ case $startStop in
     hadoop_rotate_log $log
     echo starting $command, logging to $log
     cd "$HADOOP_HOME"
-    nohup nice -n $HADOOP_NICENESS "$HADOOP_HOME"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
+    nohup nice -n $HADOOP_NICENESS $hadoopScript --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
     echo $! > $pid
     sleep 1; head "$log"
     ;;

+ 99 - 0
bin/hdfs

@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hdfs-config.sh
+
+function print_usage(){
+  echo "Usage: hdfs [--config confdir] COMMAND"
+  echo "       where COMMAND is one of:"
+  echo "  namenode -format     format the DFS filesystem"
+  echo "  secondarynamenode    run the DFS secondary namenode"
+  echo "  namenode             run the DFS namenode"
+  echo "  datanode             run a DFS datanode"
+  echo "  dfsadmin             run a DFS admin client"
+  echo "  fsck                 run a DFS filesystem checking utility"
+  echo "  balancer             run a cluster balancing utility"
+  echo ""
+  echo "Most commands print help when invoked w/o parameters."
+}
+
+if [ $# = 0 ]; then
+  print_usage
+  exit
+fi
+
+COMMAND=$1
+shift
+
+if [ "$COMMAND" = "namenode" ] ; then
+  CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
+elif [ "$COMMAND" = "secondarynamenode" ] ; then
+  CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
+elif [ "$COMMAND" = "datanode" ] ; then
+  CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
+elif [ "$COMMAND" = "dfs" ] ; then
+  CLASS=org.apache.hadoop.fs.FsShell
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "dfsadmin" ] ; then
+  CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "fsck" ] ; then
+  CLASS=org.apache.hadoop.hdfs.tools.DFSck
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "balancer" ] ; then
+  CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
+else
+  echo $COMMAND - invalid command
+  print_usage
+  exit
+fi
+
+# for developers, add hdfs classes to CLASSPATH
+if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
+fi
+if [ -d "$HADOOP_HDFS_HOME/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build
+fi
+if [ -d "$HADOOP_HDFS_HOME/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/test/classes
+fi
+if [ -d "$HADOOP_HDFS_HOME/build/tools" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/tools
+fi
+
+# for releases, add core hdfs jar & webapps to CLASSPATH
+if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
+fi
+for f in $HADOOP_HDFS_HOME/hadoop-*-hdfs.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HADOOP_HDFS_HOME/lib/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

+ 33 - 0
bin/hdfs-config.sh

@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# included in all the hdfs scripts with source command
+# should not be executed directly
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+#TODO: change the env variable when directory structure is changed
+export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/..}"
+#export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/../../core}"
+
+if [ -d "${HADOOP_CORE_HOME}" ]; then
+  . "$HADOOP_CORE_HOME"/bin/hadoop-config.sh
+else
+  echo "Hadoop core not found."
+  exit
+fi

+ 96 - 0
bin/mapred

@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. $bin/mapred-config.sh
+
+function print_usage(){
+  echo "Usage: mapred [--config confdir] COMMAND"
+  echo "       where COMMAND is one of:"
+  echo "  mradmin              run a Map-Reduce admin client"
+  echo "  jobtracker           run the MapReduce job Tracker node" 
+  echo "  tasktracker          run a MapReduce task Tracker node" 
+  echo "  pipes                run a Pipes job"
+  echo "  job                  manipulate MapReduce jobs"
+  echo "  queue                get information regarding JobQueues"
+  echo ""
+  echo "Most commands print help when invoked w/o parameters."
+}
+
+if [ $# = 0 ]; then
+  print_usage
+  exit
+fi
+
+COMMAND=$1
+shift
+
+if [ "$COMMAND" = "mradmin" ] ; then
+  CLASS=org.apache.hadoop.mapred.tools.MRAdmin
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "jobtracker" ] ; then
+  CLASS=org.apache.hadoop.mapred.JobTracker
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
+elif [ "$COMMAND" = "tasktracker" ] ; then
+  CLASS=org.apache.hadoop.mapred.TaskTracker
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
+elif [ "$COMMAND" = "job" ] ; then
+  CLASS=org.apache.hadoop.mapred.JobClient
+elif [ "$COMMAND" = "queue" ] ; then
+  CLASS=org.apache.hadoop.mapred.JobQueueClient
+elif [ "$COMMAND" = "pipes" ] ; then
+  CLASS=org.apache.hadoop.mapred.pipes.Submitter
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "sampler" ] ; then
+  CLASS=org.apache.hadoop.mapred.lib.InputSampler
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+else
+  echo $COMMAND - invalid command
+  print_usage
+  exit
+fi
+
+# for developers, add mapred classes to CLASSPATH
+if [ -d "$HADOOP_MAPRED_HOME/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/classes
+fi
+if [ -d "$HADOOP_MAPRED_HOME/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build
+fi
+if [ -d "$HADOOP_MAPRED_HOME/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/test/classes
+fi
+if [ -d "$HADOOP_MAPRED_HOME/build/tools" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/tools
+fi
+
+# for releases, add core mapred jar & webapps to CLASSPATH
+if [ -d "$HADOOP_MAPRED_HOME/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME
+fi
+for f in $HADOOP_MAPRED_HOME/hadoop-*-mapred.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HADOOP_MAPRED_HOME/lib/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

+ 33 - 0
bin/mapred-config.sh

@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# included in all the mapred scripts with source command
+# should not be executed directly
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+#TODO: change the env variable when directory structure is changed
+export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/..}"
+#export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/../../core}"
+
+if [ -d "${HADOOP_CORE_HOME}" ]; then
+  . "$HADOOP_CORE_HOME"/bin/hadoop-config.sh
+else
+  echo "Hadoop core not found."
+  exit
+fi

+ 10 - 4
bin/start-all.sh

@@ -18,13 +18,19 @@
 
 
 # Start all hadoop daemons.  Run this on master node.
 
+echo "This script is Deprecated. Instead use start-dfs.sh and start-mapred.sh"
+
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
 . "$bin"/hadoop-config.sh
 
-# start dfs daemons
-"$bin"/start-dfs.sh --config $HADOOP_CONF_DIR
+# start hdfs daemons if hdfs is present
+if [ -f "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh ]; then
+  "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh --config $HADOOP_CONF_DIR
+fi
 
-# start mapred daemons
-"$bin"/start-mapred.sh --config $HADOOP_CONF_DIR
+# start mapred daemons if mapred is present
+if [ -f "${HADOOP_MAPRED_HOME}"/bin/start-mapred.sh ]; then
+  "${HADOOP_MAPRED_HOME}"/bin/start-mapred.sh --config $HADOOP_CONF_DIR
+fi

+ 2 - 2
bin/start-balancer.sh

@@ -18,8 +18,8 @@
 bin=`dirname "$0"`
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 bin=`cd "$bin"; pwd`
 
 
-. "$bin"/hadoop-config.sh
+. "$bin"/hdfs-config.sh
 
 
 # Start balancer daemon.
 # Start balancer daemon.
 
 
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start balancer $@
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs start balancer $@

+ 4 - 4
bin/start-dfs.sh

@@ -25,7 +25,7 @@ usage="Usage: start-dfs.sh [-upgrade|-rollback]"
 bin=`dirname "$0"`
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 bin=`cd "$bin"; pwd`
 
 
-. "$bin"/hadoop-config.sh
+. "$bin"/hdfs-config.sh
 
 
 # get arguments
 # get arguments
 if [ $# -ge 1 ]; then
 if [ $# -ge 1 ]; then
@@ -47,6 +47,6 @@ fi
 # start dfs daemons
 # start dfs daemons
 # start namenode after datanodes, to minimize time namenode is up w/o data
 # start namenode after datanodes, to minimize time namenode is up w/o data
 # note: datanodes will log connection errors until namenode starts
 # note: datanodes will log connection errors until namenode starts
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start namenode $nameStartOpt
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start datanode $dataStartOpt
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters start secondarynamenode
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs start namenode $nameStartOpt
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR "$bin"/hdfs start datanode $dataStartOpt
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters "$bin"/hdfs start secondarynamenode

+ 3 - 3
bin/start-mapred.sh

@@ -21,9 +21,9 @@
 bin=`dirname "$0"`
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 bin=`cd "$bin"; pwd`
 
 
-. "$bin"/hadoop-config.sh
+. $bin/mapred-config.sh
 
 
 # start mapred daemons
 # start mapred daemons
 # start jobtracker first to minimize connection errors at startup
 # start jobtracker first to minimize connection errors at startup
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start jobtracker
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start tasktracker
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/mapred start jobtracker
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR "$bin"/mapred start tasktracker

+ 12 - 2
bin/stop-all.sh

@@ -18,10 +18,20 @@
 
 
 # Stop all hadoop daemons.  Run this on master node.
 # Stop all hadoop daemons.  Run this on master node.
 
 
+echo "This script is Deprecated. Instead use stop-dfs.sh and stop-mapred.sh"
+
 bin=`dirname "$0"`
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 bin=`cd "$bin"; pwd`
 
 
 . "$bin"/hadoop-config.sh
 . "$bin"/hadoop-config.sh
 
 
-"$bin"/stop-mapred.sh --config $HADOOP_CONF_DIR
-"$bin"/stop-dfs.sh --config $HADOOP_CONF_DIR
+# stop hdfs daemons if hdfs is present
+if [ -f "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh ]; then
+  "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh --config $HADOOP_CONF_DIR
+fi
+
+# stop mapred daemons if mapred is present
+if [ -f "${HADOOP_MAPRED_HOME}"/bin/stop-mapred.sh ]; then
+  "${HADOOP_MAPRED_HOME}"/bin/stop-mapred.sh --config $HADOOP_CONF_DIR
+fi
+

+ 2 - 2
bin/stop-balancer.sh

@@ -18,9 +18,9 @@
 bin=`dirname "$0"`
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 bin=`cd "$bin"; pwd`
 
 
-. "$bin"/hadoop-config.sh
+. "$bin"/hdfs-config.sh
 
 
 # Stop balancer daemon.
 # Stop balancer daemon.
 # Run this on the machine where the balancer is running
 # Run this on the machine where the balancer is running
 
 
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop balancer
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs stop balancer

+ 4 - 5
bin/stop-dfs.sh

@@ -21,9 +21,8 @@
 bin=`dirname "$0"`
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 bin=`cd "$bin"; pwd`
 
 
-. "$bin"/hadoop-config.sh
-
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop namenode
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop datanode
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters stop secondarynamenode
+. "$bin"/hdfs-config.sh
 
 
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs stop namenode
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR "$bin"/hdfs stop datanode
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters "$bin"/hdfs stop secondarynamenode

+ 3 - 4
bin/stop-mapred.sh

@@ -21,8 +21,7 @@
 bin=`dirname "$0"`
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 bin=`cd "$bin"; pwd`
 
 
-. "$bin"/hadoop-config.sh
-
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop jobtracker
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop tasktracker
+. $bin/mapred-config.sh
 
 
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/mapred stop jobtracker
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR "$bin"/mapred stop tasktracker