#!/bin/sh
#
# The Hadoop command script
#
# Environment Variables
#
#   JAVA_HOME            The java implementation to use.
#
#   HADOOP_HEAPSIZE      The maximum amount of heap to use, in MB.
#                        Default is 1000.
#
#   HADOOP_OPTS          Extra Java runtime options.
#
#   HADOOP_CONF_DIR      Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#
#   HADOOP_ROOT_LOGGER   The root appender. Default is INFO,console
#
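# Most of these can also be set in ${HADOOP_CONF_DIR}/hadoop-env.sh, which is
# sourced below. Example invocation (illustrative values only):
#
#   HADOOP_HEAPSIZE=2000 HADOOP_CONF_DIR=/path/to/conf bin/hadoop fsck /
#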
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`

. "$bin"/hadoop-config.sh

# if no args specified, show usage
if [ $# = 0 ]; then
  echo "Usage: hadoop [--config confdir] COMMAND"
  echo "where COMMAND is one of:"
  echo "  namenode -format   format the DFS filesystem"
  echo "  namenode           run the DFS namenode"
  echo "  datanode           run a DFS datanode"
  echo "  dfsadmin           run a DFS admin client"
  echo "  dfs                run a DFS user client"
  echo "  fsck               run a DFS filesystem checking utility"
  echo "  jobtracker         run the MapReduce job Tracker node"
  echo "  tasktracker        run a MapReduce task Tracker node"
  echo "  job                manipulate MapReduce jobs"
  echo "  version            print the version"
  echo "  jar <jar>          run a jar file"
  echo "  distcp <srcurl> <desturl>  copy files or directories recursively"
  echo " or"
  echo "  CLASSNAME          run the class named CLASSNAME"
  echo "Most commands print help when invoked w/o parameters."
  exit 1
fi
# get arguments
COMMAND=$1
shift

if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi
# some Java parameters
if [ "$JAVA_HOME" != "" ]; then
  #echo "run java in $JAVA_HOME"
  JAVA_HOME=$JAVA_HOME
fi

if [ "$JAVA_HOME" = "" ]; then
  echo "Error: JAVA_HOME is not set."
  exit 1
fi

JAVA=$JAVA_HOME/bin/java
JAVA_HEAP_MAX=-Xmx1000m
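# (HADOOP_HEAPSIZE, if set, overrides this default below; it is given in MB,
#  e.g. HADOOP_HEAPSIZE=2000 becomes -Xmx2000m.)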
# check envvars which might override default args
if [ "$HADOOP_HEAPSIZE" != "" ]; then
  #echo "run with heapsize $HADOOP_HEAPSIZE"
  JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
  #echo $JAVA_HEAP_MAX
fi
# CLASSPATH initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar

# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_HOME/build/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
fi
if [ -d "$HADOOP_HOME/build/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
fi
if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
fi
# so that filenames w/ spaces are handled correctly in loops below
IFS=

# for releases, add hadoop jars & webapps to CLASSPATH
if [ -d "$HADOOP_HOME/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME
fi
for f in $HADOOP_HOME/hadoop-*.jar; do
  CLASSPATH=${CLASSPATH}:$f;
done

# add libs to CLASSPATH
for f in $HADOOP_HOME/lib/*.jar; do
  CLASSPATH=${CLASSPATH}:$f;
done
for f in $HADOOP_HOME/lib/jetty-ext/*.jar; do
  CLASSPATH=${CLASSPATH}:$f;
done
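
# At this point CLASSPATH holds, in order: the conf dir, tools.jar, any
# build/ directories from a developer checkout, $HADOOP_HOME itself (for the
# release webapps), the release hadoop-*.jar, and all jars under lib/ and
# lib/jetty-ext/.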

# setup 'java.library.path' for native-hadoop code if necessary
JAVA_LIBRARY_PATH=''
if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName`
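  # JAVA_PLATFORM is the string printed by PlatformName, typically of the
  # form os-arch-datamodel (e.g. Linux-i386-32); it selects the native
  # library subdirectory below.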
  if [ -d "$HADOOP_HOME/build/native" ]; then
    JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
  fi
  if [ -d "${HADOOP_HOME}/lib/native" ]; then
    JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
  fi
fi
# restore ordinary behaviour
unset IFS

# default log directory & file
if [ "$HADOOP_LOG_DIR" = "" ]; then
  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
fi
if [ "$HADOOP_LOGFILE" = "" ]; then
  HADOOP_LOGFILE='hadoop.log'
fi
# figure out which class to run
if [ "$COMMAND" = "namenode" ] ; then
  CLASS='org.apache.hadoop.dfs.NameNode'
elif [ "$COMMAND" = "datanode" ] ; then
  CLASS='org.apache.hadoop.dfs.DataNode'
elif [ "$COMMAND" = "dfs" ] ; then
  CLASS=org.apache.hadoop.dfs.DFSShell
elif [ "$COMMAND" = "dfsadmin" ] ; then
  CLASS=org.apache.hadoop.dfs.DFSAdmin
elif [ "$COMMAND" = "fsck" ] ; then
  CLASS=org.apache.hadoop.dfs.DFSck
elif [ "$COMMAND" = "jobtracker" ] ; then
  CLASS=org.apache.hadoop.mapred.JobTracker
elif [ "$COMMAND" = "tasktracker" ] ; then
  CLASS=org.apache.hadoop.mapred.TaskTracker
elif [ "$COMMAND" = "job" ] ; then
  CLASS=org.apache.hadoop.mapred.JobClient
elif [ "$COMMAND" = "version" ] ; then
  CLASS=org.apache.hadoop.util.VersionInfo
elif [ "$COMMAND" = "jar" ] ; then
  CLASS=org.apache.hadoop.util.RunJar
elif [ "$COMMAND" = "distcp" ] ; then
  CLASS=org.apache.hadoop.util.CopyFiles
else
  CLASS=$COMMAND
fi

# cygwin path translation
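# (Under Cygwin the JVM is a native Windows program, so the classpath and the
#  Hadoop directories are converted to Windows-style paths with cygpath
#  before being handed to java.)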
if expr `uname` : 'CYGWIN*' > /dev/null; then
  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  HADOOP_HOME=`cygpath -d "$HADOOP_HOME"`
  HADOOP_LOG_DIR=`cygpath -d "$HADOOP_LOG_DIR"`
fi
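
# These -D system properties pass the log location, home dir and identity
# down to the JVM; hadoop.log.dir, hadoop.log.file and hadoop.root.logger are
# assumed to be referenced from the log4j configuration in the conf dir.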
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
fi
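
# exec replaces this shell with the JVM, so signals go straight to the Java
# process and no wrapper shell is left running.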
# run it
exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"