#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`

. "$bin"/hdfs-config.sh
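
# hdfs-config.sh (sourced above) is expected to provide the shared Hadoop
# environment used later in this script, e.g. HADOOP_HDFS_HOME, JAVA,
# JAVA_HEAP_MAX and the various *_OPTS variables.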

function print_usage(){
  echo "Usage: hdfs [--config confdir] COMMAND"
  echo "       where COMMAND is one of:"
  echo "  namenode -format     format the DFS filesystem"
  echo "  secondarynamenode    run the DFS secondary namenode"
  echo "  namenode             run the DFS namenode"
  echo "  datanode             run a DFS datanode"
  echo "  dfs                  run a filesystem command on the file system"
  echo "  dfsadmin             run a DFS admin client"
  echo "  fsck                 run a DFS filesystem checking utility"
  echo "  balancer             run a cluster balancing utility"
  echo "  jmxget               get JMX exported values from NameNode or DataNode."
  echo "  oiv                  apply the offline fsimage viewer to an fsimage"
  echo "                       Use -help to see options"
  echo ""
  echo "Most commands print help when invoked w/o parameters."
}

if [ $# = 0 ]; then
  print_usage
  exit
fi

COMMAND=$1
shift
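
# Map the subcommand to the Java class that implements it, and append any
# command-specific JVM options to HADOOP_OPTS.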
if [ "$COMMAND" = "namenode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
elif [ "$COMMAND" = "secondarynamenode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
elif [ "$COMMAND" = "datanode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
elif [ "$COMMAND" = "dfs" ] ; then
  CLASS=org.apache.hadoop.fs.FsShell
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "dfsadmin" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "fsck" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DFSck
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "balancer" ] ; then
  CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
elif [ "$COMMAND" = "jmxget" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.JMXGet
elif [ "$COMMAND" = "oiv" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
else
  echo "$COMMAND - invalid command"
  print_usage
  exit 1
fi

# for developers, add hdfs classes to CLASSPATH
if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
fi
if [ -d "$HADOOP_HDFS_HOME/build/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build
fi
if [ -d "$HADOOP_HDFS_HOME/build/test/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/test/classes
fi
if [ -d "$HADOOP_HDFS_HOME/build/tools" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/tools
fi

# for releases, add core hdfs jar & webapps to CLASSPATH
if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
fi
for f in $HADOOP_HDFS_HOME/hadoop-hdfs-*.jar; do
  CLASSPATH=${CLASSPATH}:$f;
done

# add libs to CLASSPATH
for f in $HADOOP_HDFS_HOME/lib/*.jar; do
  CLASSPATH=${CLASSPATH}:$f;
done

# $cygwin is normally set by the sourced Hadoop config scripts; default it
# here so the test below stays well-defined if it is not.
cygwin=${cygwin:-false}
if $cygwin; then
  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
fi
export CLASSPATH=$CLASSPATH
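
# Hand off to the JVM; "$@" forwards the remaining command-line arguments
# to the selected class.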
exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"