Browse Source

HDFS-11803. Add -v option for du command to show header line. Contributed by Xiaobing Zhou

Mingliang Liu 8 years ago
parent
commit
1db186f662

+ 11 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java

@@ -160,7 +160,7 @@ class FsUsage extends FsCommand {
   /** show disk usage */
   public static class Du extends FsUsage {
     public static final String NAME = "du";
-    public static final String USAGE = "[-s] [-h] [-x] <path> ...";
+    public static final String USAGE = "[-s] [-h] [-v] [-x] <path> ...";
     public static final String DESCRIPTION =
         "Show the amount of space, in bytes, used by the files that match " +
             "the specified file pattern. The following flags are optional:\n" +
@@ -168,6 +168,7 @@ class FsUsage extends FsCommand {
             " matches the pattern, shows the total (summary) size.\n" +
             "-h: Formats the sizes of files in a human-readable fashion" +
             " rather than a number of bytes.\n" +
+            "-v: Displays a header line with column names.\n" +
             "-x: Excludes snapshots from being counted.\n\n" +
             "Note that, even without the -s option, this only shows size " +
             "summaries one level deep into a directory.\n\n" +
@@ -175,14 +176,16 @@ class FsUsage extends FsCommand {
             "\tsize\tdisk space consumed\tname(full path)\n";
 
     protected boolean summary = false;
+    private boolean showHeaderLine = false;
     private boolean excludeSnapshots = false;
     
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
-      CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "h", "s", "x");
+      CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "h", "s", "v", "x");
       cf.parse(args);
       setHumanReadable(cf.getOpt("h"));
       summary = cf.getOpt("s");
+      showHeaderLine = cf.getOpt("v");
       excludeSnapshots = cf.getOpt("x");
       if (args.isEmpty()) args.add(Path.CUR_DIR);
     }
@@ -190,7 +193,12 @@ class FsUsage extends FsCommand {
     @Override
     protected void processArguments(LinkedList<PathData> args)
         throws IOException {
-      setUsagesTable(new TableBuilder(3));
+      if (showHeaderLine) {
+        setUsagesTable(new TableBuilder("SIZE",
+            "DISK_SPACE_CONSUMED_WITH_ALL_REPLICAS", "FULL_PATH_NAME"));
+      } else {
+        setUsagesTable(new TableBuilder(3));
+      }
       super.processArguments(args);
       if (!getUsagesTable().isEmpty()) {
         getUsagesTable().printToStream(out);

+ 1 - 0
hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md

@@ -232,6 +232,7 @@ Options:
 
 * The -s option will result in an aggregate summary of file lengths being displayed, rather than the individual files. Without the -s option, calculation is done by going 1-level deep from the given path.
 * The -h option will format file sizes in a "human-readable" fashion (e.g 64.0m instead of 67108864)
+* The -v option will display the names of columns as a header line.
 * The -x option will exclude snapshots from the result calculation. Without the -x option (default), the result is always calculated from all INodes, including all snapshots under the given path.
 
 The du returns three columns with the following format:

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/resources/testConf.xml

@@ -200,7 +200,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-du \[-s\] \[-h\] \[-x\] &lt;path&gt; \.\.\. :\s*</expected-output>
+          <expected-output>^-du \[-s\] \[-h\] \[-v\] \[-x\] &lt;path&gt; \.\.\. :\s*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>