#!/bin/sh
#
# The Hadoop command script
#
# Environment Variables
#
#   JAVA_HOME          The java implementation to use.  Required.
#
#   HADOOP_HEAPSIZE    The maximum amount of heap to use, in MB.
#                      Default is 1000.
#
#   HADOOP_OPTS        Extra Java runtime options.
#
#   HADOOP_CONF_DIR    Alternate conf dir.  Default is ${HADOOP_HOME}/conf.
#
#   HADOOP_ROOT_LOGGER The root appender.  Default is INFO,console
#

# Resolve the absolute directory holding this script, so the companion
# config script can be found no matter where the caller's cwd is.
bin=$(dirname -- "$0")
bin=$(cd -- "$bin" && pwd)

# Pull in shared configuration (sets HADOOP_HOME, HADOOP_CONF_DIR, ...).
. "$bin"/hadoop-config.sh
  21. # if no args specified, show usage
  22. if [ $# = 0 ]; then
  23. echo "Usage: hadoop [--config confdir] COMMAND"
  24. echo "where COMMAND is one of:"
  25. echo " namenode -format format the DFS filesystem"
  26. echo " secondarynamenode run the DFS secondary namenode"
  27. echo " namenode run the DFS namenode"
  28. echo " datanode run a DFS datanode"
  29. echo " dfsadmin run a DFS admin client"
  30. echo " fsck run a DFS filesystem checking utility"
  31. echo " fs run a generic filesystem user client"
  32. echo " jobtracker run the MapReduce job Tracker node"
  33. echo " tasktracker run a MapReduce task Tracker node"
  34. echo " job manipulate MapReduce jobs"
  35. echo " version print the version"
  36. echo " jar <jar> run a jar file"
  37. echo " distcp <srcurl> <desturl> copy file or directories recursively"
  38. echo " or"
  39. echo " CLASSNAME run the class named CLASSNAME"
  40. echo "Most commands print help when invoked w/o parameters."
  41. exit 1
  42. fi
  43. # get arguments
  44. COMMAND=$1
  45. shift
  46. if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  47. . "${HADOOP_CONF_DIR}/hadoop-env.sh"
  48. fi
  49. # some Java parameters
  50. if [ "$JAVA_HOME" != "" ]; then
  51. #echo "run java in $JAVA_HOME"
  52. JAVA_HOME=$JAVA_HOME
  53. fi
  54. if [ "$JAVA_HOME" = "" ]; then
  55. echo "Error: JAVA_HOME is not set."
  56. exit 1
  57. fi
  58. JAVA=$JAVA_HOME/bin/java
  59. JAVA_HEAP_MAX=-Xmx1000m
  60. # check envvars which might override default args
  61. if [ "$HADOOP_HEAPSIZE" != "" ]; then
  62. #echo "run with heapsize $HADOOP_HEAPSIZE"
  63. JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
  64. #echo $JAVA_HEAP_MAX
  65. fi
  66. # CLASSPATH initially contains $HADOOP_CONF_DIR
  67. CLASSPATH="${HADOOP_CONF_DIR}"
  68. CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
  69. # for developers, add Hadoop classes to CLASSPATH
  70. if [ -d "$HADOOP_HOME/build/classes" ]; then
  71. CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
  72. fi
  73. if [ -d "$HADOOP_HOME/build/webapps" ]; then
  74. CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
  75. fi
  76. if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  77. CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
  78. fi
  79. # so that filenames w/ spaces are handled correctly in loops below
  80. IFS=
  81. # for releases, add core hadoop jar & webapps to CLASSPATH
  82. if [ -d "$HADOOP_HOME/webapps" ]; then
  83. CLASSPATH=${CLASSPATH}:$HADOOP_HOME
  84. fi
  85. for f in $HADOOP_HOME/hadoop-*-core.jar; do
  86. CLASSPATH=${CLASSPATH}:$f;
  87. done
  88. # add libs to CLASSPATH
  89. for f in $HADOOP_HOME/lib/*.jar; do
  90. CLASSPATH=${CLASSPATH}:$f;
  91. done
  92. for f in $HADOOP_HOME/lib/jetty-ext/*.jar; do
  93. CLASSPATH=${CLASSPATH}:$f;
  94. done
  95. # setup 'java.library.path' for native-hadoop code if necessary
  96. JAVA_LIBRARY_PATH=''
  97. if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
  98. JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName`
  99. if [ -d "$HADOOP_HOME/build/native" ]; then
  100. JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
  101. fi
  102. if [ -d "${HADOOP_HOME}/lib/native" ]; then
  103. if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  104. JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
  105. else
  106. JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
  107. fi
  108. fi
  109. fi
  110. # restore ordinary behaviour
  111. unset IFS
  112. # default log directory & file
  113. if [ "$HADOOP_LOG_DIR" = "" ]; then
  114. HADOOP_LOG_DIR="$HADOOP_HOME/logs"
  115. fi
  116. if [ "$HADOOP_LOGFILE" = "" ]; then
  117. HADOOP_LOGFILE='hadoop.log'
  118. fi
  119. # figure out which class to run
  120. if [ "$COMMAND" = "namenode" ] ; then
  121. CLASS='org.apache.hadoop.dfs.NameNode'
  122. elif [ "$COMMAND" = "secondarynamenode" ] ; then
  123. CLASS='org.apache.hadoop.dfs.SecondaryNameNode'
  124. elif [ "$COMMAND" = "datanode" ] ; then
  125. CLASS='org.apache.hadoop.dfs.DataNode'
  126. elif [ "$COMMAND" = "fs" ] ; then
  127. CLASS=org.apache.hadoop.fs.FsShell
  128. elif [ "$COMMAND" = "dfs" ] ; then
  129. CLASS=org.apache.hadoop.fs.FsShell
  130. elif [ "$COMMAND" = "dfsadmin" ] ; then
  131. CLASS=org.apache.hadoop.dfs.DFSAdmin
  132. elif [ "$COMMAND" = "fsck" ] ; then
  133. CLASS=org.apache.hadoop.dfs.DFSck
  134. elif [ "$COMMAND" = "jobtracker" ] ; then
  135. CLASS=org.apache.hadoop.mapred.JobTracker
  136. elif [ "$COMMAND" = "tasktracker" ] ; then
  137. CLASS=org.apache.hadoop.mapred.TaskTracker
  138. elif [ "$COMMAND" = "job" ] ; then
  139. CLASS=org.apache.hadoop.mapred.JobClient
  140. elif [ "$COMMAND" = "version" ] ; then
  141. CLASS=org.apache.hadoop.util.VersionInfo
  142. elif [ "$COMMAND" = "jar" ] ; then
  143. CLASS=org.apache.hadoop.util.RunJar
  144. elif [ "$COMMAND" = "distcp" ] ; then
  145. CLASS=org.apache.hadoop.util.CopyFiles
  146. else
  147. CLASS=$COMMAND
  148. fi
  149. # cygwin path translation
  150. if expr `uname` : 'CYGWIN*' > /dev/null; then
  151. CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  152. HADOOP_HOME=`cygpath -d "$HADOOP_HOME"`
  153. HADOOP_LOG_DIR=`cygpath -d "$HADOOP_LOG_DIR"`
  154. fi
  155. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
  156. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
  157. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
  158. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
  159. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
  160. if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  161. HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
  162. fi
  163. # run it
  164. exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"