
HADOOP-18359. Update commons-cli from 1.2 to 1.5. (#5095). Contributed by Shilun Fan.

Signed-off-by: Ayush Saxena <ayushsaxena@apache.org>
slfan1989 2 years ago
parent
commit
a2dda0ce03
23 changed files with 385 additions and 434 deletions
  1. +1 -1  LICENSE-binary
  2. +14 -16  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
  3. +2 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfTest.java
  4. +27 -29  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
  5. +28 -54  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java
  6. +34 -52  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
  7. +57 -38  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
  8. +5 -5  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
  9. +21 -22  hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java
  10. +6 -6  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
  11. +49 -50  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DiskBalancerCLI.java
  12. +25 -32  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/JMXGet.java
  13. +8 -10  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsViewer.java
  14. +5 -11  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java
  15. +3 -5  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
  16. +15 -15  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
  17. +4 -3  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
  18. +9 -10  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
  19. +33 -33  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/main/java/org/apache/hadoop/mapred/uploader/FrameworkUploader.java
  20. +1 -1  hadoop-project/pom.xml
  21. +18 -17  hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/src/main/java/org/apache/hadoop/tools/dynamometer/blockgenerator/GenerateBlockImagesDriver.java
  22. +12 -13  hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/WorkloadDriver.java
  23. +8 -8  hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

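The bulk of this patch is one mechanical substitution: commons-cli deprecated the static, stateful OptionBuilder in 1.3 in favor of the per-call Option.builder(), so every withArgName/withDescription/withLongOpt/create chain becomes argName/desc/longOpt/build on a fresh builder instance. A minimal before/after sketch of that mapping, modeled on the -conf option rewritten below (both forms compile against commons-cli 1.5, where OptionBuilder survives as deprecated API):

import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;

public class BuilderMigration {

  @SuppressWarnings("static-access")
  static Option oldStyle() {
    // 1.2 style: OptionBuilder accumulates state in static fields, so
    // callers on different threads had to synchronize on OptionBuilder.class.
    return OptionBuilder.withArgName("configuration file")
        .hasArg()
        .withDescription("specify an application configuration file")
        .withLongOpt("conf")
        .create("c");
  }

  static Option newStyle() {
    // 1.3+ style: Option.builder() returns a private builder instance,
    // so there is no shared state to synchronize on.
    return Option.builder("c").argName("configuration file")
        .hasArg()
        .desc("specify an application configuration file")
        .longOpt("conf")
        .build();
  }
}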
+ 1 - 1
LICENSE-binary

@@ -245,7 +245,7 @@ com.squareup.okhttp3:okhttp:4.10.0
 com.squareup.okio:okio:3.2.0
 com.zaxxer:HikariCP:4.0.3
 commons-beanutils:commons-beanutils:1.9.4
-commons-cli:commons-cli:1.2
+commons-cli:commons-cli:1.5.0
 commons-codec:commons-codec:1.11
 commons-collections:commons-collections:3.2.2
 commons-daemon:commons-daemon:1.0.13

+ 14 - 16
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java

@@ -33,7 +33,6 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.audit.CommonAuditContext;
@@ -362,29 +361,28 @@ public class ServiceLauncher<S extends Service>
   /**
    * Override point: create an options instance to combine with the 
    * standard options set.
-   * <i>Important. Synchronize uses of {@link OptionBuilder}</i>
-   * with {@code OptionBuilder.class}
+   * <i>Important. Synchronize uses of {@link Option}</i>
+   * with {@code Option.class}
    * @return the new options
    */
   @SuppressWarnings("static-access")
   protected Options createOptions() {
-    synchronized (OptionBuilder.class) {
+    synchronized (Option.class) {
       Options options = new Options();
-      Option oconf = OptionBuilder.withArgName("configuration file")
+      Option oconf = Option.builder(ARG_CONF_SHORT).argName("configuration file")
           .hasArg()
-          .withDescription("specify an application configuration file")
-          .withLongOpt(ARG_CONF)
-          .create(ARG_CONF_SHORT);
-      Option confclass = OptionBuilder.withArgName("configuration classname")
+          .desc("specify an application configuration file")
+          .longOpt(ARG_CONF)
+          .build();
+      Option confclass = Option.builder(ARG_CONFCLASS_SHORT).argName("configuration classname")
           .hasArg()
-          .withDescription(
-              "Classname of a Hadoop Configuration subclass to load")
-          .withLongOpt(ARG_CONFCLASS)
-          .create(ARG_CONFCLASS_SHORT);
-      Option property = OptionBuilder.withArgName("property=value")
+          .desc("Classname of a Hadoop Configuration subclass to load")
+          .longOpt(ARG_CONFCLASS)
+          .build();
+      Option property = Option.builder("D").argName("property=value")
           .hasArg()
-          .withDescription("use value for given property")
-          .create('D');
+          .desc("use value for given property")
+          .build();
       options.addOption(oconf);
       options.addOption(property);
       options.addOption(confclass);

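The synchronized block and its javadoc warning are retained here, though with Option.builder() each thread gets its own Option.Builder instance, so the static-state hazard that motivated synchronizing on OptionBuilder.class no longer exists. A small sketch of that isolation (class name is illustrative):

import org.apache.commons.cli.Option;

public class BuilderIsolation {
  public static void main(String[] args) throws InterruptedException {
    // Two threads, two independent builders: nothing static is shared,
    // so both options come out fully and correctly populated.
    Runnable build = () -> {
      Option o = Option.builder("D").argName("property=value")
          .hasArg()
          .desc("use value for given property")
          .build();
      System.out.println(o.getOpt() + " -> " + o.getDescription());
    };
    Thread t1 = new Thread(build);
    Thread t2 = new Thread(build);
    t1.start();
    t2.start();
    t1.join();
    t2.join();
  }
}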
+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfTest.java

@@ -46,7 +46,6 @@ import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.MissingArgumentException;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -230,8 +229,8 @@ public final class ConfTest {
     GenericOptionsParser genericParser = new GenericOptionsParser(args);
     String[] remainingArgs = genericParser.getRemainingArgs();
 
-    Option conf = OptionBuilder.hasArg().create("conffile");
-    Option help = OptionBuilder.withLongOpt("help").create('h');
+    Option conf = Option.builder("conffile").hasArg().build();
+    Option help = Option.builder("h").longOpt("help").build();
     Options opts = new Options().addOption(conf).addOption(help);
     CommandLineParser specificParser = new GnuParser();
     CommandLine cmd = null;

+ 27 - 29
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java

@@ -32,7 +32,6 @@ import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -225,51 +224,50 @@ public class GenericOptionsParser {
 
   /**
    * @return Specify properties of each generic option.
-   * <i>Important</i>: as {@link OptionBuilder} is not thread safe, subclasses
-   * must synchronize use on {@code OptionBuilder.class}
+   * <i>Important</i>: as {@link Option} is not thread safe, subclasses
+   * must synchronize use on {@code Option.class}
    * @param opts input opts.
    */
   @SuppressWarnings("static-access")
   protected Options buildGeneralOptions(Options opts) {
-    synchronized (OptionBuilder.class) {
-      Option fs = OptionBuilder.withArgName("file:///|hdfs://namenode:port")
+    synchronized (Option.class) {
+      Option fs = Option.builder("fs").argName("file:///|hdfs://namenode:port")
           .hasArg()
-          .withDescription("specify default filesystem URL to use, "
+          .desc("specify default filesystem URL to use, "
           + "overrides 'fs.defaultFS' property from configurations.")
-          .create("fs");
-      Option jt = OptionBuilder.withArgName("local|resourcemanager:port")
+          .build();
+      Option jt = Option.builder("jt").argName("local|resourcemanager:port")
           .hasArg()
-          .withDescription("specify a ResourceManager")
-          .create("jt");
-      Option oconf = OptionBuilder.withArgName("configuration file")
+          .desc("specify a ResourceManager")
+          .build();
+      Option oconf = Option.builder("conf").argName("configuration file")
           .hasArg()
-          .withDescription("specify an application configuration file")
-          .create("conf");
-      Option property = OptionBuilder.withArgName("property=value")
+          .desc("specify an application configuration file")
+          .build();
+      Option property = Option.builder("D").argName("property=value")
           .hasArg()
-          .withDescription("use value for given property")
-          .create('D');
-      Option libjars = OptionBuilder.withArgName("paths")
+          .desc("use value for given property")
+          .build();
+      Option libjars = Option.builder("libjars").argName("paths")
           .hasArg()
-          .withDescription(
-              "comma separated jar files to include in the classpath.")
-          .create("libjars");
-      Option files = OptionBuilder.withArgName("paths")
+          .desc("comma separated jar files to include in the classpath.")
+          .build();
+      Option files = Option.builder("files").argName("paths")
           .hasArg()
-          .withDescription("comma separated files to be copied to the " +
+          .desc("comma separated files to be copied to the " +
               "map reduce cluster")
-          .create("files");
-      Option archives = OptionBuilder.withArgName("paths")
+          .build();
+      Option archives = Option.builder("archives").argName("paths")
           .hasArg()
-          .withDescription("comma separated archives to be unarchived" +
+          .desc("comma separated archives to be unarchived" +
               " on the compute machines.")
-          .create("archives");
+          .build();
 
       // file with security tokens
-      Option tokensFile = OptionBuilder.withArgName("tokensFile")
+      Option tokensFile = Option.builder("tokenCacheFile").argName("tokensFile")
           .hasArg()
-          .withDescription("name of the file with the tokens")
-          .create("tokenCacheFile");
+          .desc("name of the file with the tokens")
+          .build();
 
 
       opts.addOption(fs);

+ 28 - 54
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java

@@ -29,10 +29,9 @@ import org.junit.Test;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
@@ -272,7 +271,7 @@ public class TestTFileSeek {
 
       try {
         Options opts = buildOptions();
-        CommandLineParser parser = new GnuParser();
+        CommandLineParser parser = new DefaultParser();
         CommandLine line = parser.parse(opts, args, true);
         processOptions(line, opts);
         validateOptions();
@@ -290,81 +289,56 @@ public class TestTFileSeek {
 
     private Options buildOptions() {
       Option compress =
-          OptionBuilder.withLongOpt("compress").withArgName("[none|lzo|gz]")
-              .hasArg().withDescription("compression scheme").create('c');
+          Option.builder("c").longOpt("compress").argName("[none|lzo|gz]")
+          .hasArg().desc("compression scheme").build();
 
       Option fileSize =
-          OptionBuilder.withLongOpt("file-size").withArgName("size-in-MB")
-              .hasArg().withDescription("target size of the file (in MB).")
-              .create('s');
+          Option.builder("s").longOpt("file-size").argName("size-in-MB")
+          .hasArg().desc("target size of the file (in MB).").build();
 
       Option fsInputBufferSz =
-          OptionBuilder.withLongOpt("fs-input-buffer").withArgName("size")
-              .hasArg().withDescription(
-                  "size of the file system input buffer (in bytes).").create(
-                  'i');
+          Option.builder("i").longOpt("fs-input-buffer").argName("size")
+          .hasArg().desc("size of the file system input buffer (in bytes).").build();
 
       Option fsOutputBufferSize =
-          OptionBuilder.withLongOpt("fs-output-buffer").withArgName("size")
-              .hasArg().withDescription(
-                  "size of the file system output buffer (in bytes).").create(
-                  'o');
+          Option.builder("o").longOpt("fs-output-buffer").argName("size")
+          .hasArg().desc("size of the file system output buffer (in bytes).").build();
 
       Option keyLen =
-          OptionBuilder
-              .withLongOpt("key-length")
-              .withArgName("min,max")
-              .hasArg()
-              .withDescription(
-                  "the length range of the key (in bytes)")
-              .create('k');
+          Option.builder("k").longOpt("key-length").argName("min,max")
+          .hasArg().desc("the length range of the key (in bytes)").build();
 
       Option valueLen =
-          OptionBuilder
-              .withLongOpt("value-length")
-              .withArgName("min,max")
-              .hasArg()
-              .withDescription(
-                  "the length range of the value (in bytes)")
-              .create('v');
+          Option.builder("v").longOpt("value-length").argName("min,max")
+          .hasArg().desc("the length range of the value (in bytes)").build();
 
       Option blockSz =
-          OptionBuilder.withLongOpt("block").withArgName("size-in-KB").hasArg()
-              .withDescription("minimum block size (in KB)").create('b');
+          Option.builder("b").longOpt("block").argName("size-in-KB").hasArg()
+          .desc("minimum block size (in KB)").build();
 
       Option seed =
-          OptionBuilder.withLongOpt("seed").withArgName("long-int").hasArg()
-              .withDescription("specify the seed").create('S');
+          Option.builder("S").longOpt("seed").argName("long-int").hasArg()
+          .desc("specify the seed").build();
 
       Option operation =
-          OptionBuilder.withLongOpt("operation").withArgName("r|w|rw").hasArg()
-              .withDescription(
-                  "action: seek-only, create-only, seek-after-create").create(
-                  'x');
+          Option.builder("x").longOpt("operation").argName("r|w|rw").hasArg()
+          .desc("action: seek-only, create-only, seek-after-create").build();
 
       Option rootDir =
-          OptionBuilder.withLongOpt("root-dir").withArgName("path").hasArg()
-              .withDescription(
-                  "specify root directory where files will be created.")
-              .create('r');
+          Option.builder("r").longOpt("root-dir").argName("path").hasArg()
+          .desc("specify root directory where files will be created.").build();
 
       Option file =
-          OptionBuilder.withLongOpt("file").withArgName("name").hasArg()
-              .withDescription("specify the file name to be created or read.")
-              .create('f');
+          Option.builder("f").longOpt("file").argName("name").hasArg()
+          .desc("specify the file name to be created or read.").build();
 
       Option seekCount =
-          OptionBuilder
-              .withLongOpt("seek")
-              .withArgName("count")
-              .hasArg()
-              .withDescription(
-                  "specify how many seek operations we perform (requires -x r or -x rw.")
-              .create('n');
+          Option.builder("n").longOpt("seek").argName("count").hasArg()
+          .desc("specify how many seek operations we perform (requires -x r or -x rw.").build();
 
       Option help =
-          OptionBuilder.withLongOpt("help").hasArg(false).withDescription(
-              "show this screen").create("h");
+          Option.builder("h").longOpt("help").hasArg(false)
+          .desc("show this screen").build();
 
       return new Options().addOption(compress).addOption(fileSize).addOption(
           fsInputBufferSz).addOption(fsOutputBufferSize).addOption(keyLen)

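GnuParser is deprecated as of commons-cli 1.3 and DefaultParser is its drop-in replacement; the three-argument parse overload used above is unchanged, with the trailing true telling the parser to stop at the first unrecognized token instead of failing. A hedged usage sketch (option and values are illustrative):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class DefaultParserDemo {
  public static void main(String[] args) throws ParseException {
    Options opts = new Options()
        .addOption(Option.builder("c").longOpt("compress")
            .argName("[none|lzo|gz]").hasArg()
            .desc("compression scheme").build());

    CommandLineParser parser = new DefaultParser();
    // stopAtNonOption=true: "extra" lands in getArgs() rather than aborting the parse.
    CommandLine line = parser.parse(opts, new String[] {"-c", "gz", "extra"}, true);
    System.out.println(line.getOptionValue("c"));  // gz
    System.out.println(line.getArgs()[0]);         // extra
  }
}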
+ 34 - 52
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java

@@ -30,10 +30,9 @@ import org.junit.Test;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
@@ -553,7 +552,7 @@ public class TestTFileSeqFileComparison {
 
       try {
         Options opts = buildOptions();
-        CommandLineParser parser = new GnuParser();
+        CommandLineParser parser = new DefaultParser();
         CommandLine line = parser.parse(opts, args, true);
         processOptions(line, opts);
         validateOptions();
@@ -571,87 +570,70 @@ public class TestTFileSeqFileComparison {
 
     private Options buildOptions() {
       Option compress =
-          OptionBuilder.withLongOpt("compress").withArgName("[none|lzo|gz]")
-              .hasArg().withDescription("compression scheme").create('c');
+          Option.builder("c").longOpt("compress").argName("[none|lzo|gz]")
+          .hasArg().desc("compression scheme").build();
 
       Option ditSize =
-          OptionBuilder.withLongOpt("dict").withArgName("size").hasArg()
-              .withDescription("number of dictionary entries").create('d');
+          Option.builder("d").longOpt("dict").argName("size")
+          .hasArg().desc("number of dictionary entries").build();
 
       Option fileSize =
-          OptionBuilder.withLongOpt("file-size").withArgName("size-in-MB")
-              .hasArg().withDescription("target size of the file (in MB).")
-              .create('s');
+          Option.builder("s").longOpt("file-size").argName("size-in-MB")
+          .hasArg().desc("target size of the file (in MB).").build();
 
       Option format =
-          OptionBuilder.withLongOpt("format").withArgName("[tfile|seqfile]")
-              .hasArg().withDescription("choose TFile or SeqFile").create('f');
+          Option.builder("f").longOpt("format").argName("[tfile|seqfile]")
+          .hasArg().desc("choose TFile or SeqFile").build();
 
       Option fsInputBufferSz =
-          OptionBuilder.withLongOpt("fs-input-buffer").withArgName("size")
-              .hasArg().withDescription(
-                  "size of the file system input buffer (in bytes).").create(
-                  'i');
+          Option.builder("i").longOpt("fs-input-buffer").argName("size")
+          .hasArg().desc("size of the file system input buffer (in bytes).").build();
 
       Option fsOutputBufferSize =
-          OptionBuilder.withLongOpt("fs-output-buffer").withArgName("size")
-              .hasArg().withDescription(
-                  "size of the file system output buffer (in bytes).").create(
-                  'o');
+          Option.builder("o").longOpt("fs-output-buffer").argName("size")
+          .hasArg().desc("size of the file system output buffer (in bytes).").build();
 
       Option keyLen =
-          OptionBuilder
-              .withLongOpt("key-length")
-              .withArgName("length")
-              .hasArg()
-              .withDescription(
-                  "base length of the key (in bytes), actual length varies in [base, 2*base)")
-              .create('k');
+          Option.builder("o").longOpt("key-length").argName("length")
+          .hasArg()
+          .desc("base length of the key (in bytes), actual length varies in [base, 2*base)")
+          .build();
 
       Option valueLen =
-          OptionBuilder
-              .withLongOpt("value-length")
-              .withArgName("length")
-              .hasArg()
-              .withDescription(
-                  "base length of the value (in bytes), actual length varies in [base, 2*base)")
-              .create('v');
+          Option.builder("v").longOpt("key-length").argName("length")
+          .longOpt("value-length").argName("length").hasArg()
+          .desc("base length of the value (in bytes), actual length varies in [base, 2*base)")
+          .build();
 
       Option wordLen =
-          OptionBuilder.withLongOpt("word-length").withArgName("min,max")
-              .hasArg().withDescription(
-                  "range of dictionary word length (in bytes)").create('w');
+          Option.builder("w").longOpt("word-length").argName("min,max")
+          .hasArg().desc("range of dictionary word length (in bytes)").build();
 
       Option blockSz =
-          OptionBuilder.withLongOpt("block").withArgName("size-in-KB").hasArg()
-              .withDescription("minimum block size (in KB)").create('b');
+          Option.builder("b").longOpt("block").argName("size-in-KB").hasArg()
+          .desc("minimum block size (in KB)").build();
 
       Option seed =
-          OptionBuilder.withLongOpt("seed").withArgName("long-int").hasArg()
-              .withDescription("specify the seed").create('S');
+          Option.builder("S").longOpt("seed").argName("long-int").hasArg()
+          .desc("specify the seed").build();
 
       Option operation =
-          OptionBuilder.withLongOpt("operation").withArgName("r|w|rw").hasArg()
-              .withDescription(
-                  "action: read-only, create-only, read-after-create").create(
-                  'x');
+          Option.builder("x").longOpt("operation").argName("r|w|rw").hasArg()
+          .desc("action: read-only, create-only, read-after-create").build();
 
       Option rootDir =
-          OptionBuilder.withLongOpt("root-dir").withArgName("path").hasArg()
-              .withDescription(
-                  "specify root directory where files will be created.")
-              .create('r');
+          Option.builder("r").longOpt("root-dir").argName("path").hasArg()
+          .desc("specify root directory where files will be created.").build();
 
       Option help =
-          OptionBuilder.withLongOpt("help").hasArg(false).withDescription(
-              "show this screen").create("h");
+          Option.builder("h").longOpt("help").hasArg(false)
+          .desc("show this screen").build();
 
       return new Options().addOption(compress).addOption(ditSize).addOption(
           fileSize).addOption(format).addOption(fsInputBufferSz).addOption(
           fsOutputBufferSize).addOption(keyLen).addOption(wordLen).addOption(
           blockSz).addOption(rootDir).addOption(valueLen).addOption(operation)
           .addOption(help);
-
     }
 
     private void processOptions(CommandLine line, Options opts)

+ 57 - 38
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java

@@ -23,7 +23,7 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
@@ -88,59 +88,78 @@ public class RPCCallBenchmark extends TestRpcBase implements Tool {
       }
     }
 
-    @SuppressWarnings("static-access")
     private Options buildOptions() {
       Options opts = new Options();
       opts.addOption(
-        OptionBuilder.withLongOpt("serverThreads").hasArg(true)
-        .withArgName("numthreads")
-        .withDescription("number of server threads (handlers) to run (or 0 to not run server)")
-        .create("s"));
+          Option.builder("s")
+          .longOpt("serverThreads")
+          .hasArg(true)
+          .argName("numthreads")
+          .desc("number of server threads (handlers) to run (or 0 to not run server)")
+          .build());
+
       opts.addOption(
-        OptionBuilder.withLongOpt("serverReaderThreads").hasArg(true)
-        .withArgName("threads")
-        .withDescription("number of server reader threads to run")
-        .create("r"));
+          Option.builder("r")
+          .longOpt("serverReaderThreads")
+          .hasArg(true)
+          .argName("threads")
+          .desc("number of server reader threads to run")
+          .build());
 
-      
       opts.addOption(
-        OptionBuilder.withLongOpt("clientThreads").hasArg(true)
-        .withArgName("numthreads")
-        .withDescription("number of client threads to run (or 0 to not run client)")
-        .create("c"));
+          Option.builder("c")
+          .longOpt("clientThreads")
+          .hasArg(true)
+          .argName("numthreads")
+          .desc("number of client threads to run (or 0 to not run client)")
+          .build());
 
       opts.addOption(
-        OptionBuilder.withLongOpt("messageSize").hasArg(true)
-        .withArgName("bytes")
-        .withDescription("size of call parameter in bytes")
-        .create("m"));
+          Option.builder("m")
+          .longOpt("messageSize")
+          .hasArg(true)
+          .argName("bytes")
+          .desc("size of call parameter in bytes")
+          .build());
 
       opts.addOption(
-          OptionBuilder.withLongOpt("time").hasArg(true)
-          .withArgName("seconds")
-          .withDescription("number of seconds to run clients for")
-          .create("t"));
+          Option.builder("t")
+          .longOpt("time")
+          .hasArg(true)
+          .argName("seconds")
+          .desc("number of seconds to run clients for")
+          .build());
+
       opts.addOption(
-          OptionBuilder.withLongOpt("port").hasArg(true)
-          .withArgName("port")
-          .withDescription("port to listen or connect on")
-          .create("p"));
+          Option.builder("p")
+          .longOpt("port")
+          .hasArg(true)
+          .argName("port")
+          .desc("port to listen or connect on")
+          .build());
+
       opts.addOption(
-          OptionBuilder.withLongOpt("host").hasArg(true)
-          .withArgName("addr")
-          .withDescription("host to listen or connect on")
-          .create('h'));
+          Option.builder("h")
+          .longOpt("host")
+          .hasArg(true)
+          .argName("addr")
+          .desc("host to listen or connect on")
+          .build());
       
       opts.addOption(
-          OptionBuilder.withLongOpt("engine").hasArg(true)
-          .withArgName("protobuf")
-          .withDescription("engine to use")
-          .create('e'));
+          Option.builder("e")
+          .longOpt("engine")
+          .hasArg(true)
+          .argName("protobuf")
+          .desc("engine to use")
+          .build());
       
       opts.addOption(
-          OptionBuilder.withLongOpt("help").hasArg(false)
-          .withDescription("show this screen")
-          .create('?'));
+          Option.builder("?")
+          .longOpt("help")
+          .hasArg(false)
+          .desc("show this screen")
+          .build());
 
       return opts;
     }

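HelpFormatter is untouched by the upgrade and renders builder-produced options the same way; a minimal sketch of how the benchmark's help output might be printed (the usage string is illustrative):

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

public class HelpDemo {
  public static void main(String[] args) {
    Options opts = new Options()
        .addOption(Option.builder("s").longOpt("serverThreads").hasArg(true)
            .argName("numthreads")
            .desc("number of server threads (handlers) to run").build())
        .addOption(Option.builder("?").longOpt("help").hasArg(false)
            .desc("show this screen").build());
    // Prints an aligned table, e.g. "-s,--serverThreads <numthreads>  number of ..."
    new HelpFormatter().printHelp("RPCCallBenchmark [options]", opts);
  }
}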
+ 5 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java

@@ -34,7 +34,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.math3.util.Pair;
 import org.apache.hadoop.conf.Configuration;
@@ -198,10 +197,11 @@ public class TestGenericOptionsParser {
   @Test
   public void testCreateWithOptions() throws Exception {
     // Create new option newOpt
-    Option opt = OptionBuilder.withArgName("int")
-    .hasArg()
-    .withDescription("A new option")
-    .create("newOpt");
+
+    Option opt = Option.builder("newOpt").argName("int")
+        .hasArg()
+        .desc("A new option")
+        .build();
     Options opts = new Options();
     opts.addOption(opt);
 

+ 21 - 22
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java

@@ -32,7 +32,6 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
@@ -246,29 +245,29 @@ public class RegistryCli extends Configured implements Tool, Closeable {
   }
 
   public int bind(String[] args) {
-    Option rest = OptionBuilder.withArgName("rest")
+    Option rest = Option.builder("rest").argName("rest")
                                .hasArg()
-                               .withDescription("rest Option")
-                               .create("rest");
-    Option webui = OptionBuilder.withArgName("webui")
+                               .desc("rest Option")
+                               .build();
+    Option webui = Option.builder("webui").argName("webui")
                                 .hasArg()
-                                .withDescription("webui Option")
-                                .create("webui");
-    Option inet = OptionBuilder.withArgName("inet")
-                               .withDescription("inet Option")
-                               .create("inet");
-    Option port = OptionBuilder.withArgName("port")
+                                .desc("webui Option")
+                                .build();
+    Option inet = Option.builder("inet").argName("inet")
+                               .desc("inet Option")
+                               .build();
+    Option port = Option.builder("p").argName("port")
                                .hasArg()
-                               .withDescription("port to listen on [9999]")
-                               .create("p");
-    Option host = OptionBuilder.withArgName("host")
+                               .desc("port to listen on [9999]")
+                               .build();
+    Option host = Option.builder("h").argName("host")
                                .hasArg()
-                               .withDescription("host name")
-                               .create("h");
-    Option apiOpt = OptionBuilder.withArgName("api")
+                               .desc("host name")
+                               .build();
+    Option apiOpt = Option.builder("api").argName("api")
                                  .hasArg()
-                                 .withDescription("api")
-                                 .create("api");
+                                 .desc("api")
+                                 .build();
     Options inetOption = new Options();
     inetOption.addOption(inet);
     inetOption.addOption(port);
@@ -412,9 +411,9 @@ public class RegistryCli extends Configured implements Tool, Closeable {
 
   @SuppressWarnings("unchecked")
   public int rm(String[] args) {
-    Option recursive = OptionBuilder.withArgName("recursive")
-                                    .withDescription("delete recursively")
-                                    .create("r");
+    Option recursive = Option.builder("r").argName("recursive")
+                                    .desc("delete recursively")
+                                    .build();
 
     Options rmOption = new Options();
     rmOption.addOption(recursive);

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java

@@ -724,12 +724,12 @@ public class Mover {
 
     private static Options buildCliOptions() {
       Options opts = new Options();
-      Option file = OptionBuilder.withArgName("pathsFile").hasArg()
-          .withDescription("a local file containing files/dirs to migrate")
-          .create("f");
-      Option paths = OptionBuilder.withArgName("paths").hasArgs()
-          .withDescription("specify space separated files/dirs to migrate")
-          .create("p");
+      Option file = Option.builder("f").argName("pathsFile").hasArg()
+          .desc("a local file containing files/dirs to migrate")
+          .build();
+      Option paths = Option.builder("p").argName("paths").hasArgs()
+          .desc("specify space separated files/dirs to migrate")
+          .build();
       OptionGroup group = new OptionGroup();
       group.addOption(file);
       group.addOption(paths);

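The surrounding OptionGroup (unchanged by this patch) is what makes -f and -p mutually exclusive: supplying both members of a group fails the parse. A short sketch of that behavior using the two options above:

import org.apache.commons.cli.AlreadySelectedException;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class MutuallyExclusive {
  public static void main(String[] args) {
    Option file = Option.builder("f").argName("pathsFile").hasArg()
        .desc("a local file containing files/dirs to migrate").build();
    Option paths = Option.builder("p").argName("paths").hasArgs()
        .desc("specify space separated files/dirs to migrate").build();

    OptionGroup group = new OptionGroup();
    group.addOption(file);
    group.addOption(paths);

    Options opts = new Options();
    opts.addOptionGroup(group);

    try {
      // Selecting both members of the group is rejected by the parser.
      new DefaultParser().parse(opts, new String[] {"-f", "a.txt", "-p", "/x"});
    } catch (AlreadySelectedException e) {
      System.out.println("rejected: " + e.getMessage());
    } catch (ParseException e) {
      System.out.println("parse error: " + e.getMessage());
    }
  }
}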
+ 49 - 50
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DiskBalancerCLI.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.tools;
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
@@ -279,34 +278,34 @@ public class DiskBalancerCLI extends Configured implements Tool {
    */
   private void addPlanCommands(Options opt) {
 
-    Option plan = OptionBuilder.withLongOpt(PLAN)
-        .withDescription("Hostname, IP address or UUID of datanode " +
+    Option plan = Option.builder().longOpt(PLAN)
+        .desc("Hostname, IP address or UUID of datanode " +
             "for which a plan is created.")
         .hasArg()
-        .create();
+        .build();
     getPlanOptions().addOption(plan);
     opt.addOption(plan);
 
 
-    Option outFile = OptionBuilder.withLongOpt(OUTFILE).hasArg()
-        .withDescription(
+    Option outFile = Option.builder().longOpt(OUTFILE).hasArg()
+        .desc(
             "Local path of file to write output to, if not specified "
                 + "defaults will be used.")
-        .create();
+        .build();
     getPlanOptions().addOption(outFile);
     opt.addOption(outFile);
 
-    Option bandwidth = OptionBuilder.withLongOpt(BANDWIDTH).hasArg()
-        .withDescription(
+    Option bandwidth = Option.builder().longOpt(BANDWIDTH).hasArg()
+        .desc(
             "Maximum disk bandwidth (MB/s) in integer to be consumed by "
                 + "diskBalancer. e.g. 10 MB/s.")
-        .create();
+        .build();
     getPlanOptions().addOption(bandwidth);
     opt.addOption(bandwidth);
 
-    Option threshold = OptionBuilder.withLongOpt(THRESHOLD)
+    Option threshold = Option.builder().longOpt(THRESHOLD)
         .hasArg()
-        .withDescription("Percentage of data skew that is tolerated before"
+        .desc("Percentage of data skew that is tolerated before"
             + " disk balancer starts working. For example, if"
             + " total data on a 2 disk node is 100 GB then disk"
             + " balancer calculates the expected value on each disk,"
@@ -314,22 +313,22 @@ public class DiskBalancerCLI extends Configured implements Tool {
             + " on a single disk needs to be more than 60 GB"
             + " (50 GB + 10% tolerance value) for Disk balancer to"
             + " balance the disks.")
-        .create();
+        .build();
     getPlanOptions().addOption(threshold);
     opt.addOption(threshold);
 
 
-    Option maxError = OptionBuilder.withLongOpt(MAXERROR)
+    Option maxError = Option.builder().longOpt(MAXERROR)
         .hasArg()
-        .withDescription("Describes how many errors " +
+        .desc("Describes how many errors " +
             "can be tolerated while copying between a pair of disks.")
-        .create();
+        .build();
     getPlanOptions().addOption(maxError);
     opt.addOption(maxError);
 
-    Option verbose = OptionBuilder.withLongOpt(VERBOSE)
-        .withDescription("Print out the summary of the plan on console")
-        .create();
+    Option verbose = Option.builder().longOpt(VERBOSE)
+        .desc("Print out the summary of the plan on console")
+        .build();
     getPlanOptions().addOption(verbose);
     opt.addOption(verbose);
   }
@@ -338,11 +337,11 @@ public class DiskBalancerCLI extends Configured implements Tool {
    * Adds Help to the options.
    */
   private void addHelpCommands(Options opt) {
-    Option help = OptionBuilder.withLongOpt(HELP)
-        .hasOptionalArg()
-        .withDescription("valid commands are plan | execute | query | cancel" +
+    Option help = Option.builder().longOpt(HELP)
+        .optionalArg(true)
+        .desc("valid commands are plan | execute | query | cancel" +
             " | report")
-        .create();
+        .build();
     getHelpOptions().addOption(help);
     opt.addOption(help);
   }
@@ -353,17 +352,17 @@ public class DiskBalancerCLI extends Configured implements Tool {
    * @param opt Options
    */
   private void addExecuteCommands(Options opt) {
-    Option execute = OptionBuilder.withLongOpt(EXECUTE)
+    Option execute = Option.builder().longOpt(EXECUTE)
         .hasArg()
-        .withDescription("Takes a plan file and " +
+        .desc("Takes a plan file and " +
             "submits it for execution by the datanode.")
-        .create();
+        .build();
     getExecuteOptions().addOption(execute);
 
 
-    Option skipDateCheck = OptionBuilder.withLongOpt(SKIPDATECHECK)
-        .withDescription("skips the date check and force execute the plan")
-        .create();
+    Option skipDateCheck = Option.builder().longOpt(SKIPDATECHECK)
+        .desc("skips the date check and force execute the plan")
+        .build();
     getExecuteOptions().addOption(skipDateCheck);
 
     opt.addOption(execute);
@@ -376,20 +375,20 @@ public class DiskBalancerCLI extends Configured implements Tool {
    * @param opt Options
    */
   private void addQueryCommands(Options opt) {
-    Option query = OptionBuilder.withLongOpt(QUERY)
+    Option query = Option.builder().longOpt(QUERY)
         .hasArg()
-        .withDescription("Queries the disk balancer " +
+        .desc("Queries the disk balancer " +
             "status of a given datanode.")
-        .create();
+        .build();
     getQueryOptions().addOption(query);
     opt.addOption(query);
 
     // Please note: Adding this only to Query options since -v is already
     // added to global table.
-    Option verbose = OptionBuilder.withLongOpt(VERBOSE)
-        .withDescription("Prints details of the plan that is being executed " +
+    Option verbose = Option.builder().longOpt(VERBOSE)
+        .desc("Prints details of the plan that is being executed " +
             "on the node.")
-        .create();
+        .build();
     getQueryOptions().addOption(verbose);
   }
 
@@ -399,17 +398,17 @@ public class DiskBalancerCLI extends Configured implements Tool {
    * @param opt Options
    */
   private void addCancelCommands(Options opt) {
-    Option cancel = OptionBuilder.withLongOpt(CANCEL)
+    Option cancel = Option.builder().longOpt(CANCEL)
         .hasArg()
-        .withDescription("Cancels a running plan using a plan file.")
-        .create();
+        .desc("Cancels a running plan using a plan file.")
+        .build();
     getCancelOptions().addOption(cancel);
     opt.addOption(cancel);
 
-    Option node = OptionBuilder.withLongOpt(NODE)
+    Option node = Option.builder().longOpt(NODE)
         .hasArg()
-        .withDescription("Cancels a running plan using a plan ID and hostName")
-        .create();
+        .desc("Cancels a running plan using a plan ID and hostName")
+        .build();
 
     getCancelOptions().addOption(node);
     opt.addOption(node);
@@ -421,26 +420,26 @@ public class DiskBalancerCLI extends Configured implements Tool {
    * @param opt Options
    */
   private void addReportCommands(Options opt) {
-    Option report = OptionBuilder.withLongOpt(REPORT)
-        .withDescription("List nodes that will benefit from running " +
+    Option report = Option.builder().longOpt(REPORT)
+        .desc("List nodes that will benefit from running " +
             "DiskBalancer.")
-        .create();
+        .build();
     getReportOptions().addOption(report);
     opt.addOption(report);
 
-    Option top = OptionBuilder.withLongOpt(TOP)
+    Option top = Option.builder().longOpt(TOP)
         .hasArg()
-        .withDescription("specify the number of nodes to be listed which has" +
+        .desc("specify the number of nodes to be listed which has" +
             " data imbalance.")
-        .create();
+        .build();
     getReportOptions().addOption(top);
     opt.addOption(top);
 
-    Option node =  OptionBuilder.withLongOpt(NODE)
+    Option node =  Option.builder().longOpt(NODE)
         .hasArg()
-        .withDescription("Datanode address, " +
+        .desc("Datanode address, " +
             "it can be DataNodeID, IP or hostname.")
-        .create();
+        .build();
     getReportOptions().addOption(node);
     opt.addOption(node);
   }

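DiskBalancer's options are long-only, hence the no-argument Option.builder() form; build() insists that at least one of the short or long names has been set and throws IllegalArgumentException otherwise. A small sketch:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class LongOnlyOptions {
  public static void main(String[] args) throws ParseException {
    Option plan = Option.builder()            // no short name
        .longOpt("plan")
        .hasArg()
        .desc("Hostname, IP address or UUID of datanode")
        .build();

    CommandLine line = new DefaultParser()
        .parse(new Options().addOption(plan), new String[] {"--plan", "dn-1"});
    System.out.println(line.getOptionValue("plan"));  // dn-1

    try {
      Option.builder().desc("no name at all").build();
    } catch (IllegalArgumentException e) {
      // build() rejects an option that has neither opt nor longOpt.
      System.out.println("rejected: " + e.getMessage());
    }
  }
}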
+ 25 - 32
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/JMXGet.java

@@ -39,7 +39,6 @@ import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 
@@ -247,39 +246,33 @@ public class JMXGet {
   private static CommandLine parseArgs(Options opts, String... args)
   throws IllegalArgumentException {
 
-    OptionBuilder.withArgName("NameNode|DataNode");
-    OptionBuilder.hasArg();
-    OptionBuilder.withDescription("specify jmx service (NameNode by default)");
-    Option jmx_service = OptionBuilder.create("service");
-
-    OptionBuilder.withArgName("mbean server");
-    OptionBuilder.hasArg();
-    OptionBuilder
-    .withDescription("specify mbean server (localhost by default)");
-    Option jmx_server = OptionBuilder.create("server");
-
-    OptionBuilder.withDescription("print help");
-    Option jmx_help = OptionBuilder.create("help");
-
-    OptionBuilder.withArgName("mbean server port");
-    OptionBuilder.hasArg();
-    OptionBuilder.withDescription("specify mbean server port, "
-        + "if missing - it will try to connect to MBean Server in the same VM");
-    Option jmx_port = OptionBuilder.create("port");
-
-    OptionBuilder.withArgName("VM's connector url");
-    OptionBuilder.hasArg();
-    OptionBuilder.withDescription("connect to the VM on the same machine;"
+    Option jmxService = Option.builder("service")
+        .argName("NameNode|DataNode").hasArg()
+        .desc("specify jmx service (NameNode by default)").build();
+
+    Option jmxServer = Option.builder("server")
+        .argName("mbean server").hasArg()
+        .desc("specify mbean server (localhost by default)").build();
+
+    Option jmxHelp = Option.builder("help").desc("print help").build();
+
+    Option jmxPort = Option.builder("port")
+        .argName("mbean server port")
+        .hasArg().desc("specify mbean server port, "
+        + "if missing - it will try to connect to MBean Server in the same VM").build();
+
+    Option jmxLocalVM = Option.builder("localVM")
+        .argName("VM's connector url").hasArg()
+        .desc("connect to the VM on the same machine;"
         + "\n use:\n jstat -J-Djstat.showUnsupported=true -snap <vmpid> | "
         + "grep sun.management.JMXConnectorServer.address\n "
-        + "to find the url");
-    Option jmx_localVM = OptionBuilder.create("localVM");
-
-    opts.addOption(jmx_server);
-    opts.addOption(jmx_help);
-    opts.addOption(jmx_service);
-    opts.addOption(jmx_port);
-    opts.addOption(jmx_localVM);
+        + "to find the url").build();
+
+    opts.addOption(jmxServer);
+    opts.addOption(jmxHelp);
+    opts.addOption(jmxService);
+    opts.addOption(jmxPort);
+    opts.addOption(jmxLocalVM);
 
     CommandLine commandLine = null;
     CommandLineParser parser = new GnuParser();

+ 8 - 10
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsViewer.java

@@ -28,7 +28,7 @@ import org.apache.hadoop.util.ToolRunner;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
@@ -100,15 +100,13 @@ public class OfflineEditsViewer extends Configured implements Tool {
 
     // Build in/output file arguments, which are required, but there is no 
     // addOption method that can specify this
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("outputFilename");
-    options.addOption(OptionBuilder.create("o"));
-    
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("inputFilename");
-    options.addOption(OptionBuilder.create("i"));
+    Option optionOutputFileName =
+        Option.builder("o").required().hasArgs().longOpt("outputFilename").build();
+    options.addOption(optionOutputFileName);
+
+    Option optionInputFilename =
+        Option.builder("i").required().hasArgs().longOpt("inputFilename").build();
+    options.addOption(optionInputFilename);
     
     options.addOption("p", "processor", true, "");
     options.addOption("v", "verbose", false, "");

+ 5 - 11
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java

@@ -26,7 +26,7 @@ import java.nio.file.Paths;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
@@ -177,16 +177,10 @@ public class OfflineImageViewer {
 
     // Build in/output file arguments, which are required, but there is no 
     // addOption method that can specify this
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("outputFile");
-    options.addOption(OptionBuilder.create("o"));
-    
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("inputFile");
-    options.addOption(OptionBuilder.create("i"));
-    
+    options.addOption(Option.builder("o").required().hasArgs().longOpt("outputFile").build());
+
+    options.addOption(Option.builder("i").required().hasArgs().longOpt("inputFile").build());
+
     options.addOption("p", "processor", true, "");
     options.addOption("h", "help", false, "");
     options.addOption("maxSize", true, "");

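Both viewers keep PosixParser and mark -i/-o as required; required() plus hasArgs() is the builder counterpart of the old OptionBuilder.isRequired()/hasArgs() pair, and a missing required option still surfaces as a MissingOptionException. A hedged sketch:

import org.apache.commons.cli.MissingOptionException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class RequiredOptions {
  public static void main(String[] args) {
    Options opts = new Options();
    opts.addOption(Option.builder("i").required().hasArgs().longOpt("inputFile").build());

    try {
      new PosixParser().parse(opts, new String[] {});
    } catch (MissingOptionException e) {
      System.out.println("missing: " + e.getMissingOptions());  // [i]
    } catch (ParseException e) {
      System.out.println("parse error: " + e.getMessage());
    }
  }
}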
+ 3 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java

@@ -24,7 +24,7 @@ import java.io.RandomAccessFile;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
@@ -137,10 +137,8 @@ public class OfflineImageViewerPB {
 
     // Build in/output file arguments, which are required, but there is no
     // addOption method that can specify this
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("inputFile");
-    options.addOption(OptionBuilder.create("i"));
+    Option optionInputFile = Option.builder("i").required().hasArgs().longOpt("inputFile").build();
+    options.addOption(optionInputFile);
 
     options.addOption("o", "outputFile", true, "");
     options.addOption("p", "processor", true, "");

+ 15 - 15
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java

@@ -29,7 +29,7 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
@@ -88,23 +88,23 @@ public class MiniDFSClusterManager {
         .addOption("httpport", true, "NameNode http port (default 0--we choose)")
         .addOption("namenode", true, "URL of the namenode (default "
             + "is either the DFS cluster or a temporary dir)")     
-        .addOption(OptionBuilder
+        .addOption(Option.builder("D")
             .hasArgs()
-            .withArgName("property=value")
-            .withDescription("Options to pass into configuration object")
-            .create("D"))
-        .addOption(OptionBuilder
+            .argName("property=value")
+            .desc("Options to pass into configuration object")
+            .build())
+        .addOption(Option.builder("writeConfig")
             .hasArg()
-            .withArgName("path")
-            .withDescription("Save configuration to this XML file.")
-            .create("writeConfig"))
-         .addOption(OptionBuilder
+            .argName("path")
+            .desc("Save configuration to this XML file.")
+            .build())
+         .addOption(Option.builder("writeDetails")
             .hasArg()
-            .withArgName("path")
-            .withDescription("Write basic information to this JSON file.")
-            .create("writeDetails"))
-        .addOption(OptionBuilder.withDescription("Prints option help.")
-            .create("help"));
+            .argName("path")
+            .desc("Write basic information to this JSON file.")
+            .build())
+        .addOption(Option.builder("help").desc("Prints option help.")
+            .build());
     return options;
   }
 

+ 4 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java

@@ -30,7 +30,6 @@ import java.util.StringTokenizer;
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.Parser;
@@ -346,12 +345,14 @@ public class Submitter extends Configured implements Tool {
     
     void addOption(String longName, boolean required, String description, 
                    String paramName) {
-      Option option = OptionBuilder.withArgName(paramName).hasArgs(1).withDescription(description).isRequired(required).create(longName);
+      Option option = Option.builder(longName).argName(paramName)
+          .hasArg().desc(description).required(required).build();
       options.addOption(option);
     }
     
     void addArgument(String name, boolean required, String description) {
-      Option option = OptionBuilder.withArgName(name).hasArgs(1).withDescription(description).isRequired(required).create();
+      Option option = Option.builder().argName(name)
+          .hasArg().desc(description).required(required).build();
       options.addOption(option);
 
     }

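The old hasArgs(1) has no one-to-one builder method; hasArg() is its equivalent, and numberOfArgs(int) is the general replacement when a fixed count other than one is wanted. A sketch of both (the two-value option is borrowed from the TFile tests for illustration):

import org.apache.commons.cli.Option;

public class ArgCountMigration {

  static Option singleArg(String longName, String paramName, String description,
                          boolean required) {
    // hasArg() sets the argument count to 1, matching OptionBuilder.hasArgs(1).
    return Option.builder(longName).argName(paramName)
        .hasArg().desc(description).required(required).build();
  }

  static Option lengthRange() {
    // numberOfArgs(n) replaces hasArgs(n) for fixed counts greater than one.
    return Option.builder("k").longOpt("key-length").argName("min,max")
        .numberOfArgs(2).valueSeparator(',')
        .desc("the length range of the key (in bytes)").build();
  }

  public static void main(String[] args) {
    System.out.println(singleArg("input", "path", "input file", true).getArgs());  // 1
    System.out.println(lengthRange().getArgs());                                   // 2
  }
}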
+ 9 - 10
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java

@@ -31,7 +31,7 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.fs.FileSystem;
@@ -105,18 +105,17 @@ public class MiniHadoopClusterManager {
         .addOption("jhsport", true,
             "JobHistoryServer port (default 0--we choose)")
         .addOption(
-            OptionBuilder.hasArgs().withArgName("property=value")
-                .withDescription("Options to pass into configuration object")
-                .create("D"))
+            Option.builder("D").hasArgs().argName("property=value")
+                .desc("Options to pass into configuration object")
+                .build())
         .addOption(
-            OptionBuilder.hasArg().withArgName("path").withDescription(
-                "Save configuration to this XML file.").create("writeConfig"))
+                Option.builder("writeConfig").hasArg().argName("path").desc(
+                "Save configuration to this XML file.").build())
         .addOption(
-            OptionBuilder.hasArg().withArgName("path").withDescription(
-                "Write basic information to this JSON file.").create(
-                "writeDetails"))
+                Option.builder("writeDetails").argName("path").desc(
+                "Write basic information to this JSON file.").build())
         .addOption(
-            OptionBuilder.withDescription("Prints option help.").create("help"));
+                Option.builder("help").desc("Prints option help.").build());
     return options;
   }
 

+ 33 - 33
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/main/java/org/apache/hadoop/mapred/uploader/FrameworkUploader.java

@@ -20,7 +20,7 @@ package org.apache.hadoop.mapred.uploader;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.compress.archivers.ArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
@@ -484,53 +484,53 @@ public class FrameworkUploader implements Runnable {
   @VisibleForTesting
   boolean parseArguments(String[] args) throws IOException {
     Options opts = new Options();
-    opts.addOption(OptionBuilder.create("h"));
-    opts.addOption(OptionBuilder.create("help"));
-    opts.addOption(OptionBuilder
-        .withDescription("Input class path. Defaults to the default classpath.")
-        .hasArg().create("input"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+    opts.addOption(Option.builder("h").build());
+    opts.addOption(Option.builder("help").build());
+    opts.addOption(Option.builder("input")
+        .desc("Input class path. Defaults to the default classpath.")
+        .hasArg().build());
+    opts.addOption(Option.builder("whitelist")
+        .desc(
             "Regex specifying the full path of jars to include in the" +
                 " framework tarball. Default is a hardcoded set of jars" +
                 " considered necessary to include")
-        .hasArg().create("whitelist"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+        .hasArg().build());
+    opts.addOption(Option.builder("blacklist")
+        .desc(
             "Regex specifying the full path of jars to exclude in the" +
                 " framework tarball. Default is a hardcoded set of jars" +
                 " considered unnecessary to include")
-        .hasArg().create("blacklist"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+        .hasArg().build());
+    opts.addOption(Option.builder("fs")
+        .desc(
             "Target file system to upload to." +
             " Example: hdfs://foo.com:8020")
-        .hasArg().create("fs"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+        .hasArg().build());
+    opts.addOption(Option.builder("target")
+        .desc(
             "Target file to upload to with a reference name." +
                 " Example: /usr/mr-framework.tar.gz#mr-framework")
-        .hasArg().create("target"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+        .hasArg().build());
+    opts.addOption(Option.builder("initialReplication")
+        .desc(
             "Desired initial replication count. Default 3.")
-        .hasArg().create("initialReplication"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+        .hasArg().build());
+    opts.addOption(Option.builder("finalReplication")
+        .desc(
             "Desired final replication count. Default 10.")
-        .hasArg().create("finalReplication"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+        .hasArg().build());
+    opts.addOption(Option.builder("acceptableReplication")
+        .desc(
             "Desired acceptable replication count. Default 9.")
-        .hasArg().create("acceptableReplication"));
-    opts.addOption(OptionBuilder
-        .withDescription(
+        .hasArg().build());
+    opts.addOption(Option.builder("timeout")
+        .desc(
             "Desired timeout for the acceptable" +
                 " replication in seconds. Default 10")
-        .hasArg().create("timeout"));
-    opts.addOption(OptionBuilder
-        .withDescription("Ignore symlinks into the same directory")
-        .create("nosymlink"));
+        .hasArg().build());
+    opts.addOption(Option.builder("nosymlink")
+        .desc("Ignore symlinks into the same directory")
+        .build());
     GenericOptionsParser parser = new GenericOptionsParser(opts, args);
     if (parser.getCommandLine().hasOption("help") ||
         parser.getCommandLine().hasOption("h")) {

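The mechanical mapping applied throughout this file, shown once on the input option: the option name moves from the trailing create(name) to the leading Option.builder(name), and the with*/is* prefixes drop (withDescription -> desc, withArgName -> argName, isRequired -> required, create -> build). A compilable sketch:

    import org.apache.commons.cli.Option;

    public class OptionMigrationSketch {
      // 1.2 style (OptionBuilder, deprecated since 1.3):
      //   OptionBuilder.withDescription("Input class path. ...")
      //       .hasArg().create("input");
      static Option inputOption() {
        return Option.builder("input")
            .desc("Input class path. Defaults to the default classpath.")
            .hasArg()
            .build();
      }

      public static void main(String[] args) {
        Option o = inputOption();
        System.out.println(o.getOpt() + " hasArg=" + o.hasArg());
      }
    }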
+ 1 - 1
hadoop-project/pom.xml

@@ -113,7 +113,7 @@
 
     <!-- Apache Commons dependencies -->
     <commons-beanutils.version>1.9.4</commons-beanutils.version>
-    <commons-cli.version>1.2</commons-cli.version>
+    <commons-cli.version>1.5.0</commons-cli.version>
     <commons-codec.version>1.15</commons-codec.version>
     <commons-collections.version>3.2.2</commons-collections.version>
     <commons-compress.version>1.21</commons-compress.version>

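After bumping the property, a quick runtime check that the resolved classpath really carries 1.5.0; this assumes the commons-cli jar manifest sets an Implementation-Version header, which Apache Commons release jars normally do:

    public class CommonsCliVersionCheck {
      public static void main(String[] args) {
        // Prints the Implementation-Version from the commons-cli jar manifest,
        // e.g. "1.5.0" once the new dependency is resolved.
        Package p = org.apache.commons.cli.Option.class.getPackage();
        System.out.println("commons-cli: " + p.getImplementationVersion());
      }
    }
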
+ 18 - 17
hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/src/main/java/org/apache/hadoop/tools/dynamometer/blockgenerator/GenerateBlockImagesDriver.java

@@ -21,7 +21,7 @@ import java.net.URI;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.PosixParser;
 import org.apache.hadoop.conf.Configuration;
@@ -65,24 +65,25 @@ public class GenerateBlockImagesDriver extends Configured implements Tool {
   public int run(String[] args) throws Exception {
     Options options = new Options();
     options.addOption("h", "help", false, "Shows this message");
-    options.addOption(OptionBuilder.withArgName("Input path of the XML fsImage")
-        .hasArg().isRequired(true)
-        .withDescription("Input path to the Hadoop fsImage XML file (required)")
-        .create(FSIMAGE_INPUT_PATH_ARG));
-    options.addOption(OptionBuilder.withArgName("BlockImage output directory")
-        .hasArg().isRequired(true)
-        .withDescription("Directory where the generated files containing the "
+    options.addOption(Option.builder(FSIMAGE_INPUT_PATH_ARG)
+        .argName("Input path of the XML fsImage")
+        .hasArg().required(true)
+        .desc("Input path to the Hadoop fsImage XML file (required)")
+        .build());
+    options.addOption(Option.builder(BLOCK_IMAGE_OUTPUT_ARG).argName("BlockImage output directory")
+        .hasArg().required(true)
+        .desc("Directory where the generated files containing the "
             + "block listing for each DataNode should be stored (required)")
-        .create(BLOCK_IMAGE_OUTPUT_ARG));
-    options.addOption(OptionBuilder.withArgName("Number of reducers").hasArg()
-        .isRequired(false)
-        .withDescription(
+        .build());
+    options.addOption(Option.builder(NUM_REDUCERS_ARG).argName("Number of reducers").hasArg()
+        .required(false)
+        .desc(
             "Number of reducers for this job (defaults to number of datanodes)")
-        .create(NUM_REDUCERS_ARG));
-    options.addOption(OptionBuilder.withArgName("Number of datanodes").hasArg()
-        .isRequired(true)
-        .withDescription("Number of DataNodes to create blocks for (required)")
-        .create(NUM_DATANODES_ARG));
+        .build());
+    options.addOption(Option.builder(NUM_DATANODES_ARG).argName("Number of datanodes").hasArg()
+        .required(true)
+        .desc("Number of DataNodes to create blocks for (required)")
+        .build());
 
     CommandLineParser parser = new PosixParser();
     CommandLine cli = parser.parse(options, args);

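A small sketch of what required(true) buys here: parse() fails fast with MissingOptionException when a required option is absent. Option names below are illustrative; the driver itself keeps the deprecated PosixParser, with DefaultParser being the 1.3+ replacement:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.MissingOptionException;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class RequiredOptionSketch {
      public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption(Option.builder("fsimage").argName("path").hasArg()
            .required(true).desc("Input fsImage XML (required)").build());
        try {
          new DefaultParser().parse(options, new String[] {});
        } catch (MissingOptionException e) {
          // required(true) makes parsing fail when the option is missing.
          System.out.println("missing: " + e.getMissingOptions());
        }
        CommandLine cmd = new DefaultParser().parse(options,
            new String[] {"-fsimage", "/tmp/fsimage.xml"});
        System.out.println(cmd.getOptionValue("fsimage"));
      }
    }
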
+ 12 - 13
hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/WorkloadDriver.java

@@ -26,7 +26,6 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.OptionGroup;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.PosixParser;
@@ -64,28 +63,28 @@ public class WorkloadDriver extends Configured implements Tool {
             + " argument to show help for a specific mapper class.");
     Options options = new Options();
     options.addOption(helpOption);
-    options.addOption(OptionBuilder.withArgName("NN URI").hasArg()
-        .withDescription("URI of the NameNode under test").isRequired()
-        .create(NN_URI));
+    options.addOption(Option.builder(NN_URI).argName("NN URI").hasArg()
+        .desc("URI of the NameNode under test").required()
+        .build());
     OptionGroup startTimeOptions = new OptionGroup();
-    startTimeOptions.addOption(OptionBuilder.withArgName("Start Timestamp")
-        .hasArg().withDescription("Mapper start UTC timestamp in ms")
-        .create(START_TIMESTAMP_MS));
+    startTimeOptions.addOption(Option.builder(START_TIMESTAMP_MS).argName("Start Timestamp")
+        .hasArg().desc("Mapper start UTC timestamp in ms")
+        .build());
     startTimeOptions
-        .addOption(OptionBuilder.withArgName("Start Time Offset").hasArg()
-            .withDescription("Mapper start time as an offset from current "
+        .addOption(Option.builder(START_TIME_OFFSET).argName("Start Time Offset").hasArg()
+            .desc("Mapper start time as an offset from current "
                 + "time. Human-readable formats accepted, e.g. 10m (default "
                 + START_TIME_OFFSET_DEFAULT + ").")
-            .create(START_TIME_OFFSET));
+            .build());
     options.addOptionGroup(startTimeOptions);
-    Option mapperClassOption = OptionBuilder.withArgName("Mapper ClassName")
+    Option mapperClassOption = Option.builder(MAPPER_CLASS_NAME).argName("Mapper ClassName")
         .hasArg()
-        .withDescription("Class name of the mapper; must be a WorkloadMapper "
+        .desc("Class name of the mapper; must be a WorkloadMapper "
             + "subclass. Mappers supported currently: \n"
             + "1. AuditReplayMapper \n"
             + "2. CreateFileMapper \n"
             + "Fully specified class names are also supported.")
-        .isRequired().create(MAPPER_CLASS_NAME);
+        .required().build();
     options.addOption(mapperClassOption);
 
     Options helpOptions = new Options();

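The start-time options rely on OptionGroup's mutual exclusion: supplying both members on one command line makes parse() throw AlreadySelectedException. A sketch with illustrative stand-ins for the two option names:

    import org.apache.commons.cli.AlreadySelectedException;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionGroup;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class OptionGroupSketch {
      public static void main(String[] args) throws ParseException {
        OptionGroup start = new OptionGroup();
        start.addOption(Option.builder("start_timestamp_ms").hasArg()
            .desc("Mapper start UTC timestamp in ms").build());
        start.addOption(Option.builder("start_time_offset").hasArg()
            .desc("Mapper start time as an offset from now").build());
        Options options = new Options();
        options.addOptionGroup(start);
        try {
          new DefaultParser().parse(options, new String[] {
              "-start_timestamp_ms", "0", "-start_time_offset", "1m"});
        } catch (AlreadySelectedException e) {
          // Only one member of an OptionGroup may appear per command line.
          System.out.println("exclusive: " + e.getMessage());
        }
      }
    }
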
+ 8 - 8
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

@@ -34,7 +34,6 @@ import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -382,16 +381,17 @@ public class StreamJob implements Tool {
 
   private Option createOption(String name, String desc,
                               String argName, int max, boolean required){
-    return OptionBuilder
-           .withArgName(argName)
-           .hasArgs(max)
-           .withDescription(desc)
-           .isRequired(required)
-           .create(name);
+    return Option.builder(name)
+           .argName(argName)
+           .hasArgs()
+           .numberOfArgs(max)
+           .desc(desc)
+           .required(required)
+           .build();
   }
 
   private Option createBoolOption(String name, String desc){
-    return OptionBuilder.withDescription(desc).create(name);
+    return Option.builder(name).desc(desc).build();
   }
 
   private void validate(final Path path) throws IOException {
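On createOption: in 1.5, hasArgs() simply sets the arg count to Option.UNLIMITED_VALUES, and the later numberOfArgs(max) call overwrites it, so the pair reproduces the old OptionBuilder.hasArgs(max); numberOfArgs(max) alone would suffice. A sketch of the resulting arity (option name illustrative):

    import java.util.Arrays;

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class AritySketch {
      public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // Equivalent to the old OptionBuilder.hasArgs(2): consumes up to
        // two values for -files.
        options.addOption(Option.builder("files").argName("f1 f2")
            .hasArgs().numberOfArgs(2)
            .desc("takes up to two values").build());
        CommandLine cmd = new DefaultParser().parse(options,
            new String[] {"-files", "a.txt", "b.txt"});
        // Prints [a.txt, b.txt]
        System.out.println(Arrays.toString(cmd.getOptionValues("files")));
      }
    }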