#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  16. bin=`dirname "$0"`
  17. bin=`cd "$bin"; pwd`
  18. . "$bin"/hdfs-config.sh
  19. function print_usage(){
  20. echo "Usage: hdfs [--config confdir] COMMAND"
  21. echo " where COMMAND is one of:"
  22. echo " namenode -format format the DFS filesystem"
  23. echo " secondarynamenode run the DFS secondary namenode"
  24. echo " namenode run the DFS namenode"
  25. echo " datanode run a DFS datanode"
  26. echo " dfsadmin run a DFS admin client"
  27. echo " fsck run a DFS filesystem checking utility"
  28. echo " balancer run a cluster balancing utility"
  29. echo " jmxget get JMX exported values from NameNode or DataNode."
  30. echo " oiv apply the offline fsimage viewer to an fsimage"
  31. echo " fetchdt fetch a delegation token from the NameNode"
  32. echo " Use -help to see options"
  33. echo ""
  34. echo "Most commands print help when invoked w/o parameters."
  35. }
  36. if [ $# = 0 ]; then
  37. print_usage
  38. exit
  39. fi
  40. COMMAND=$1
  41. shift
  42. if [ "$COMMAND" = "namenode" ] ; then
  43. CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
  44. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
  45. elif [ "$COMMAND" = "secondarynamenode" ] ; then
  46. CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
  47. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
  48. elif [ "$COMMAND" = "datanode" ] ; then
  49. CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
  50. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
  51. elif [ "$COMMAND" = "dfs" ] ; then
  52. CLASS=org.apache.hadoop.fs.FsShell
  53. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  54. elif [ "$COMMAND" = "dfsadmin" ] ; then
  55. CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
  56. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  57. elif [ "$COMMAND" = "fsck" ] ; then
  58. CLASS=org.apache.hadoop.hdfs.tools.DFSck
  59. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  60. elif [ "$COMMAND" = "balancer" ] ; then
  61. CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
  62. HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
  63. elif [ "$COMMAND" = "jmxget" ] ; then
  64. CLASS=org.apache.hadoop.hdfs.tools.JMXGet
  65. elif [ "$COMMAND" = "oiv" ] ; then
  66. CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
  67. elif [ "$COMMAND" = "fetchdt" ] ; then
  68. CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
  69. else
  70. echo $COMMAND - invalid command
  71. print_usage
  72. exit
  73. fi
  74. # for developers, add hdfs classes to CLASSPATH
  75. if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
  76. CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
  77. fi
  78. if [ -d "$HADOOP_HDFS_HOME/build/webapps" ]; then
  79. CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build
  80. fi
  81. if [ -d "$HADOOP_HDFS_HOME/build/test/classes" ]; then
  82. CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/test/classes
  83. fi
  84. if [ -d "$HADOOP_HDFS_HOME/build/tools" ]; then
  85. CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/tools
  86. fi
  87. # for releases, add core hdfs jar & webapps to CLASSPATH
  88. if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
  89. CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
  90. fi
  91. for f in $HADOOP_HDFS_HOME/hadoop-hdfs-*.jar; do
  92. CLASSPATH=${CLASSPATH}:$f;
  93. done
  94. # add libs to CLASSPATH
  95. for f in $HADOOP_HDFS_HOME/lib/*.jar; do
  96. CLASSPATH=${CLASSPATH}:$f;
  97. done
  98. if $cygwin; then
  99. CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  100. fi
  101. export CLASSPATH=$CLASSPATH
  102. exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"