
MAPREDUCE-7421. [JDK17] Upgrade Junit 4 to 5 in hadoop-mapreduce-client-jobclient Part1. (#7358)

Co-authored-by: Chris Nauroth <cnauroth@apache.org>
Reviewed-by: Chris Nauroth <cnauroth@apache.org>
Signed-off-by: Shilun Fan <slfan1989@apache.org>
slfan1989 2 months ago
parent
commit
19bd575e32
100 changed files with 1270 additions and 1179 deletions
  1. + 7 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
  2. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
  3. + 18 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
  4. + 6 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
  5. + 5 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
  6. + 5 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
  7. + 23 - 21
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java
  8. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
  9. + 11 - 9
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
  10. + 4 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
  11. + 4 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java
  12. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java
  13. + 8 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
  14. + 9 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
  15. + 9 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
  16. + 118 - 82
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
  17. + 7 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
  18. + 1 - 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
  19. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java
  20. + 1 - 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineOutputCollector.java
  21. + 11 - 9
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java
  22. + 18 - 15
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java
  23. + 8 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
  24. + 7 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestComparators.java
  25. + 64 - 79
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
  26. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
  27. + 6 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
  28. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java
  29. + 44 - 34
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
  30. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
  31. + 3 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java
  32. + 5 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
  33. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
  34. + 14 - 15
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
  35. + 23 - 20
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
  36. + 6 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java
  37. + 19 - 19
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java
  38. + 5 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
  39. + 4 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
  40. + 26 - 26
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
  41. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
  42. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReaderJobs.java
  43. + 17 - 19
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java
  44. + 11 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
  45. + 13 - 13
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
  46. + 5 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
  47. + 6 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java
  48. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java
  49. + 66 - 71
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
  50. + 6 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapOutputType.java
  51. + 4 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
  52. + 17 - 19
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
  53. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
  54. + 4 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
  55. + 20 - 23
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java
  56. + 5 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java
  57. + 35 - 36
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java
  58. + 6 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
  59. + 8 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java
  60. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
  61. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
  62. + 10 - 12
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
  63. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
  64. + 24 - 15
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
  65. + 16 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java
  66. + 6 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java
  67. + 9 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
  68. + 14 - 13
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
  69. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java
  70. + 19 - 19
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java
  71. + 37 - 35
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
  72. + 8 - 12
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java
  73. + 18 - 25
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
  74. + 7 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java
  75. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
  76. + 5 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
  77. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
  78. + 5 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
  79. + 4 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
  80. + 16 - 16
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java
  81. + 18 - 18
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java
  82. + 44 - 45
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java
  83. + 69 - 58
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
  84. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java
  85. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
  86. + 3 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java
  87. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
  88. + 58 - 48
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
  89. + 22 - 15
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java
  90. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java
  91. + 20 - 19
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
  92. + 25 - 21
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
  93. + 5 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
  94. + 1 - 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java
  95. + 6 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java
  96. + 4 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
  97. + 5 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java
  98. + 4 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java
  99. + 11 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
  100. + 2 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
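
Every file below follows the same JUnit 4 to JUnit 5 (Jupiter) mapping: org.junit.Test becomes org.junit.jupiter.api.Test; @Before/@After become @BeforeEach/@AfterEach; @BeforeClass/@AfterClass become @BeforeAll/@AfterAll; @Ignore becomes @Disabled; static imports of org.junit.Assert become org.junit.jupiter.api.Assertions, with the failure message moved to the last argument; and @Test(timeout = ...) becomes a separate @Timeout annotation. A minimal sketch of the target shape (the class and method names here are illustrative, not taken from the patch):

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.AfterEach;   // replaces org.junit.After
import org.junit.jupiter.api.BeforeAll;   // replaces org.junit.BeforeClass
import org.junit.jupiter.api.BeforeEach;  // replaces org.junit.Before
import org.junit.jupiter.api.Disabled;    // replaces org.junit.Ignore
import org.junit.jupiter.api.Test;        // replaces org.junit.Test
import org.junit.jupiter.api.Timeout;

public class MigrationShapeTest {

  @BeforeAll                              // was @BeforeClass
  public static void setupClass() {
  }

  @BeforeEach                             // was @Before
  public void setUp() {
  }

  @Test
  @Timeout(value = 30)                    // was @Test(timeout = 30000); Jupiter defaults to seconds
  public void testSomething() {
    // Jupiter assertions take the failure message last, not first.
    assertEquals(4, 2 + 2, "sum should be 4");
    assertTrue(2 + 2 == 4, "sum should be 4");
  }

  @Disabled                               // was @Ignore
  @Test
  public void testSkipped() {
  }

  @AfterEach                              // was @After
  public void tearDown() {
  }
}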

+ 7 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java

@@ -29,7 +29,7 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.Utils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -39,8 +39,10 @@ import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * This testcase tests that a JobConf without default values submits jobs
@@ -56,10 +58,10 @@ public class TestNoDefaultsJobConf extends HadoopTestCase {
   @Test
   public void testNoDefaults() throws Exception {
     JobConf configuration = new JobConf();
-    assertTrue(configuration.get("hadoop.tmp.dir", null) != null);
+    assertNotNull(configuration.get("hadoop.tmp.dir", null));
 
     configuration = new JobConf(false);
-    assertTrue(configuration.get("hadoop.tmp.dir", null) == null);
+    assertNull(configuration.get("hadoop.tmp.dir", null));
 
 
     Path inDir = new Path("testing/jobconf/input");
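
The assertNotNull/assertNull substitutions above are behavior-preserving but produce a more specific failure message than assertTrue over a null comparison. A standalone sketch of the same check, assuming only junit-jupiter and the Hadoop mapred client on the classpath (the class name is illustrative):

import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

import org.apache.hadoop.mapred.JobConf;
import org.junit.jupiter.api.Test;

public class JobConfDefaultsSketchTest {

  @Test
  public void testDefaultsToggle() {
    // new JobConf() loads the *-default.xml resources, so hadoop.tmp.dir is set.
    assertNotNull(new JobConf().get("hadoop.tmp.dir", null));

    // new JobConf(false) skips loading defaults, so the key is absent.
    assertNull(new JobConf(false).get("hadoop.tmp.dir", null));
  }
}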

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java

@@ -34,8 +34,8 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -66,7 +66,7 @@ import org.slf4j.LoggerFactory;
  * <li>standard i/o rate deviation</li>
  * </ul>
  */
-@Ignore
+@Disabled
 public class DFSCIOTest {
   // Constants
   private static final Logger LOG = LoggerFactory.getLogger(DFSCIOTest.class);

+ 18 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java

@@ -66,9 +66,10 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -226,7 +227,7 @@ public class TestDFSIO implements Tool {
   private static MiniDFSCluster cluster;
   private static TestDFSIO bench;
 
-  @BeforeClass
+  @BeforeAll
   public static void beforeClass() throws Exception {
     bench = new TestDFSIO();
     bench.getConf().setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
@@ -241,7 +242,7 @@ public class TestDFSIO implements Tool {
     testWrite();
   }
 
-  @AfterClass
+  @AfterAll
   public static void afterClass() throws Exception {
     if(cluster == null)
       return;
@@ -256,14 +257,16 @@ public class TestDFSIO implements Tool {
     bench.analyzeResult(fs, TestType.TEST_TYPE_WRITE, execTime);
   }
 
-  @Test (timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testRead() throws Exception {
     FileSystem fs = cluster.getFileSystem();
     long execTime = bench.readTest(fs);
     bench.analyzeResult(fs, TestType.TEST_TYPE_READ, execTime);
   }
 
-  @Test (timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testReadRandom() throws Exception {
     FileSystem fs = cluster.getFileSystem();
     bench.getConf().setLong("test.io.skip.size", 0);
@@ -271,7 +274,8 @@ public class TestDFSIO implements Tool {
     bench.analyzeResult(fs, TestType.TEST_TYPE_READ_RANDOM, execTime);
   }
 
-  @Test (timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testReadBackward() throws Exception {
     FileSystem fs = cluster.getFileSystem();
     bench.getConf().setLong("test.io.skip.size", -DEFAULT_BUFFER_SIZE);
@@ -279,7 +283,8 @@ public class TestDFSIO implements Tool {
     bench.analyzeResult(fs, TestType.TEST_TYPE_READ_BACKWARD, execTime);
   }
 
-  @Test (timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testReadSkip() throws Exception {
     FileSystem fs = cluster.getFileSystem();
     bench.getConf().setLong("test.io.skip.size", 1);
@@ -287,14 +292,16 @@ public class TestDFSIO implements Tool {
     bench.analyzeResult(fs, TestType.TEST_TYPE_READ_SKIP, execTime);
   }
 
-  @Test (timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testAppend() throws Exception {
     FileSystem fs = cluster.getFileSystem();
     long execTime = bench.appendTest(fs);
     bench.analyzeResult(fs, TestType.TEST_TYPE_APPEND, execTime);
   }
 
-  @Test (timeout = 60000)
+  @Test
+  @Timeout(value = 60)
   public void testTruncate() throws Exception {
     FileSystem fs = cluster.getFileSystem();
     bench.createControlFile(fs, DEFAULT_NR_BYTES / 2, DEFAULT_NR_FILES);
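
Note the unit change hidden in these hunks: JUnit 4's timeout attribute is expressed in milliseconds, while Jupiter's @Timeout defaults to seconds, so @Test(timeout = 10000) becomes @Timeout(value = 10). The unit can also be spelled out explicitly. A short sketch (test names illustrative):

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

public class TimeoutUnitSketchTest {

  @Test
  @Timeout(value = 10)  // 10 seconds; TimeUnit.SECONDS is the Jupiter default
  public void testDefaultUnit() throws InterruptedException {
    Thread.sleep(100);
  }

  @Test
  @Timeout(value = 10_000, unit = TimeUnit.MILLISECONDS)  // the same limit, unit made explicit
  public void testExplicitUnit() throws InterruptedException {
    Thread.sleep(100);
  }
}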

+ 6 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java

@@ -47,16 +47,16 @@ import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotSame;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.fail;
 
 
 public class TestFileSystem {

+ 5 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java

@@ -23,9 +23,9 @@ import java.io.FileOutputStream;
 import java.io.OutputStreamWriter;
 import java.io.File;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,7 +40,7 @@ public class TestJHLA {
   private String historyLog = System.getProperty("test.build.data", 
                                   "build/test/data") + "/history/test.log";
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     File logFile = new File(historyLog);
     if(!logFile.getParentFile().exists())
@@ -121,7 +121,7 @@ public class TestJHLA {
     writer.close();
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     File logFile = new File(historyLog);
     if(!logFile.delete())

+ 5 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java

@@ -19,8 +19,8 @@
 package org.apache.hadoop.fs.slive;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.DataInputStream;
 import java.io.File;
@@ -40,8 +40,8 @@ import org.apache.hadoop.fs.slive.Constants.OperationType;
 import org.apache.hadoop.fs.slive.DataVerifier.VerifyOutput;
 import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
 import org.apache.hadoop.util.ToolRunner;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -194,7 +194,7 @@ public class TestSlive {
     return extractor;
   }
 
-  @Before
+  @BeforeEach
   public void ensureDeleted() throws Exception {
     rDelete(getTestFile());
     rDelete(getTestDir());

+ 23 - 21
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.hdfs;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -31,8 +31,9 @@ import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 public class TestNNBench extends HadoopTestCase {
   private static final String BASE_DIR =
@@ -45,39 +46,39 @@ public class TestNNBench extends HadoopTestCase {
     super(LOCAL_MR, LOCAL_FS, 1, 1);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     getFileSystem().delete(new Path(BASE_DIR), true);
     getFileSystem().delete(new Path(NNBench.DEFAULT_RES_FILE_NAME), true);
     super.tearDown();
   }
 
-  @Test(timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testNNBenchCreateReadAndDelete() throws Exception {
     runNNBench(createJobConf(), "create_write");
     Path path = new Path(BASE_DIR + "/data/file_0_0");
-    assertTrue("create_write should create the file",
-        getFileSystem().exists(path));
+    assertTrue(getFileSystem().exists(path), "create_write should create the file");
     runNNBench(createJobConf(), "open_read");
     runNNBench(createJobConf(), "delete");
-    assertFalse("Delete operation should delete the file",
-        getFileSystem().exists(path));
+    assertFalse(getFileSystem().exists(path),
+        "Delete operation should delete the file");
   }
 
-  @Test(timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testNNBenchCreateAndRename() throws Exception {
     runNNBench(createJobConf(), "create_write");
     Path path = new Path(BASE_DIR + "/data/file_0_0");
-    assertTrue("create_write should create the file",
-        getFileSystem().exists(path));
+    assertTrue(getFileSystem().exists(path), "create_write should create the file");
     runNNBench(createJobConf(), "rename");
     Path renamedPath = new Path(BASE_DIR + "/data/file_0_r_0");
-    assertFalse("Rename should rename the file", getFileSystem().exists(path));
-    assertTrue("Rename should rename the file",
-        getFileSystem().exists(renamedPath));
+    assertFalse(getFileSystem().exists(path), "Rename should rename the file");
+    assertTrue(getFileSystem().exists(renamedPath), "Rename should rename the file");
   }
 
-  @Test(timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testNNBenchCreateControlFilesWithPool() throws Exception {
     runNNBench(createJobConf(), "create_write", BASE_DIR, "5");
     Path path = new Path(BASE_DIR, CONTROL_DIR_NAME);
@@ -86,7 +87,8 @@ public class TestNNBench extends HadoopTestCase {
     assertEquals(5, fileStatuses.length);
   }
 
-  @Test(timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testNNBenchCrossCluster() throws Exception {
     MiniDFSCluster dfsCluster = new MiniDFSCluster.Builder(new JobConf())
             .numDataNodes(1).build();
@@ -96,8 +98,8 @@ public class TestNNBench extends HadoopTestCase {
     runNNBench(createJobConf(), "create_write", baseDir);
 
     Path path = new Path(BASE_DIR + "/data/file_0_0");
-    assertTrue("create_write should create the file",
-            dfsCluster.getFileSystem().exists(path));
+    assertTrue(dfsCluster.getFileSystem().exists(path),
+        "create_write should create the file");
     dfsCluster.shutdown();
   }
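
The argument swap in these assertions is the most mechanical yet most error-prone part of the migration: JUnit 4 puts the optional failure message first, Jupiter puts it last. Side by side, as a sketch with illustrative values:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class AssertionOrderSketch {

  public static void main(String[] args) {
    boolean exists = true;
    boolean deleted = false;
    int count = 5;

    // JUnit 4: assertTrue("create_write should create the file", exists);
    assertTrue(exists, "create_write should create the file");

    // JUnit 4: assertFalse("file should be gone", deleted);
    assertFalse(deleted, "file should be gone");

    // JUnit 4: assertEquals("unexpected count", 5, count);
    assertEquals(5, count, "unexpected count");  // expected, actual, then message
  }
}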
 

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java

@@ -28,8 +28,8 @@ import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.mapred.*;
 
 import org.slf4j.Logger;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestSequenceFileMergeProgress {
   private static final Logger LOG = FileInputFormat.LOG;

+ 11 - 9
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java

@@ -34,8 +34,10 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.net.StandardSocketFactory;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * This class checks that RPCs can use specialized socket factories.
@@ -56,13 +58,13 @@ public class TestMRCJCSocketFactory {
 
     // Get a reference to its DFS directly
     FileSystem fs = cluster.getFileSystem();
-    Assert.assertTrue(fs instanceof DistributedFileSystem);
+    assertTrue(fs instanceof DistributedFileSystem);
     DistributedFileSystem directDfs = (DistributedFileSystem) fs;
 
     Configuration cconf = getCustomSocketConfigs(nameNodePort);
 
     fs = FileSystem.get(cconf);
-    Assert.assertTrue(fs instanceof DistributedFileSystem);
+    assertTrue(fs instanceof DistributedFileSystem);
     DistributedFileSystem dfs = (DistributedFileSystem) fs;
 
     JobClient client = null;
@@ -72,12 +74,12 @@ public class TestMRCJCSocketFactory {
       // could we test Client-DataNode connections?
       Path filePath = new Path("/dir");
 
-      Assert.assertFalse(directDfs.exists(filePath));
-      Assert.assertFalse(dfs.exists(filePath));
+      assertFalse(directDfs.exists(filePath));
+      assertFalse(dfs.exists(filePath));
 
       directDfs.mkdirs(filePath);
-      Assert.assertTrue(directDfs.exists(filePath));
-      Assert.assertTrue(dfs.exists(filePath));
+      assertTrue(directDfs.exists(filePath));
+      assertTrue(dfs.exists(filePath));
 
       // This will test RPC to a Resource Manager
       fs = FileSystem.get(sconf);
@@ -95,7 +97,7 @@ public class TestMRCJCSocketFactory {
       client = new JobClient(jconf);
 
       JobStatus[] jobs = client.jobsToComplete();
-      Assert.assertTrue(jobs.length == 0);
+      assertTrue(jobs.length == 0);
 
     } finally {
       closeClient(client);

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java

@@ -22,8 +22,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;
 
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 
 import java.io.File;
 import java.io.IOException;
@@ -64,7 +64,7 @@ public abstract class ClusterMapReduceTestCase {
    *
    * @throws Exception
    */
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     startCluster(true, null);
   }
@@ -125,7 +125,7 @@ public abstract class ClusterMapReduceTestCase {
    *
    * @throws Exception
    */
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     stopCluster();
   }

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 
 import java.io.IOException;
 
@@ -139,7 +139,7 @@ public abstract class HadoopTestCase {
    *
    * @throws Exception
    */
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     if (localFS) {
       fileSystem = FileSystem.getLocal(new JobConf());
@@ -163,7 +163,7 @@ public abstract class HadoopTestCase {
    *
    * @throws Exception
    */
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     try {
       if (mrCluster != null) {

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java

@@ -32,7 +32,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 
 import java.net.URI;
 
-import org.junit.Assert;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class MRCaching {
   static String testStr = "This is a test file " + "used for testing caching "
@@ -299,13 +299,12 @@ public class MRCaching {
     String configValues = job.get(configKey, "");
     System.out.println(configKey + " -> " + configValues);
     String[] realSizes = StringUtils.getStrings(configValues);
-    Assert.assertEquals("Number of files for "+ configKey,
-                        expectedSizes.length, realSizes.length);
+    assertEquals(expectedSizes.length, realSizes.length, "Number of files for "+ configKey);
 
     for (int i=0; i < expectedSizes.length; ++i) {
       long actual = Long.valueOf(realSizes[i]);
       long expected = expectedSizes[i];
-      Assert.assertEquals("File "+ i +" for "+ configKey, expected, actual);
+      assertEquals(expected, actual, "File "+ i +" for "+ configKey);
     }
   }
 }

+ 8 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java

@@ -35,11 +35,11 @@ import javax.servlet.ServletException;
 import java.io.IOException;
 import java.io.DataOutputStream;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import org.junit.Before;
-import org.junit.After;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
 
 
 /**
@@ -132,7 +132,7 @@ public abstract class NotificationTestCase extends HadoopTestCase {
         return;
       }
       failureCounter++;
-      assertTrue("The request (" + query + ") does not contain " + expected, false);
+      assertTrue(false, "The request (" + query + ") does not contain " + expected);
     }
   }
 
@@ -149,13 +149,13 @@ public abstract class NotificationTestCase extends HadoopTestCase {
     return conf;
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     super.setUp();
     startHttpServer();
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     stopHttpServer();
     NotificationServlet.counter = 0;
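
One migration note on the hunk above: it keeps the original assertTrue(false, message) shape, which Jupiter supports, though Assertions.fail(message) expresses the same unconditional failure more directly. A sketch of that alternative, with an illustrative helper not taken from the patch:

import static org.junit.jupiter.api.Assertions.fail;

public class FailSketch {

  static void requireContains(String query, String expected) {
    if (!query.contains(expected)) {
      // Equivalent to assertTrue(false, msg): throws AssertionFailedError unconditionally.
      fail("The request (" + query + ") does not contain " + expected);
    }
  }

  public static void main(String[] args) {
    requireContains("jobId=42&status=SUCCEEDED", "status=SUCCEEDED");
  }
}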

+ 9 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java

@@ -38,16 +38,17 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
-@Ignore
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+@Disabled
 public class TestBadRecords extends ClusterMapReduceTestCase {
   
   private static final Logger LOG =
@@ -61,7 +62,7 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
   
   private List<String> input;
 
-  @BeforeClass
+  @BeforeAll
   public static void setupClass() throws Exception {
     setupClassBase(TestBadRecords.class);
   }

+ 9 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java

@@ -146,11 +146,13 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
 public class TestClientRedirect {
 
   static {
@@ -200,7 +202,7 @@ public class TestClientRedirect {
     org.apache.hadoop.mapreduce.Counters counters =
         cluster.getJob(jobID).getCounters();
     validateCounters(counters);
-    Assert.assertTrue(amContact);
+    assertTrue(amContact);
 
     LOG.info("Sleeping for 5 seconds before stop for" +
     " the client socket to not get EOF immediately..");
@@ -218,7 +220,7 @@ public class TestClientRedirect {
     // Same client
     //results are returned from fake (not started job)
     counters = cluster.getJob(jobID).getCounters();
-    Assert.assertEquals(0, counters.countCounters());
+    assertEquals(0, counters.countCounters());
     Job job = cluster.getJob(jobID);
     org.apache.hadoop.mapreduce.TaskID taskId =
       new org.apache.hadoop.mapreduce.TaskID(jobID, TaskType.MAP, 0);
@@ -242,7 +244,7 @@ public class TestClientRedirect {
 
     counters = cluster.getJob(jobID).getCounters();
     validateCounters(counters);
-    Assert.assertTrue(amContact);
+    assertTrue(amContact);
 
     // Stop the AM. It is not even restarting. So it should be treated as
     // completed.
@@ -251,7 +253,7 @@ public class TestClientRedirect {
     // Same client
     counters = cluster.getJob(jobID).getCounters();
     validateCounters(counters);
-    Assert.assertTrue(hsContact);
+    assertTrue(hsContact);
 
     rmService.stop();
     historyService.stop();
@@ -267,7 +269,7 @@ public class TestClientRedirect {
         LOG.info("Counter is " + itc.next().getDisplayName());
       }
     }
-    Assert.assertEquals(1, counters.countCounters());
+    assertEquals(1, counters.countCounters());
   }
 
   class RMService extends AbstractService implements ApplicationClientProtocol {

+ 118 - 82
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java

@@ -58,35 +58,36 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.util.Records;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 /**
  * Tests for ClientServiceDelegate.java
  */
-
-@RunWith(value = Parameterized.class)
 public class TestClientServiceDelegate {
   private JobID oldJobId = JobID.forName("job_1315895242400_2");
   private org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = TypeConverter
       .toYarn(oldJobId);
   private boolean isAMReachableFromClient;
 
-  public TestClientServiceDelegate(boolean isAMReachableFromClient) {
-    this.isAMReachableFromClient = isAMReachableFromClient;
+  public void initTestClientServiceDelegate(boolean pIsAMReachableFromClient) {
+    this.isAMReachableFromClient = pIsAMReachableFromClient;
   }
 
-  @Parameters
   public static Collection<Object[]> data() {
     Object[][] data = new Object[][] { { true }, { false } };
     return Arrays.asList(data);
   }
 
-  @Test
-  public void testUnknownAppInRM() throws Exception {
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testUnknownAppInRM(boolean pIsAMReachableFromClient) throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
     when(historyServerProxy.getJobReport(getJobReportRequest())).thenReturn(
         getJobReportResponse());
@@ -94,12 +95,14 @@ public class TestClientServiceDelegate {
         historyServerProxy, getRMDelegate());
 
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
+    assertNotNull(jobStatus);
   }
 
-  @Test
-  public void testRemoteExceptionFromHistoryServer() throws Exception {
-
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testRemoteExceptionFromHistoryServer(boolean pIsAMReachableFromClient)
+      throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
     when(historyServerProxy.getJobReport(getJobReportRequest())).thenThrow(
         new IOException("Job ID doesnot Exist"));
@@ -113,16 +116,18 @@ public class TestClientServiceDelegate {
 
     try {
       clientServiceDelegate.getJobStatus(oldJobId);
-      Assert.fail("Invoke should throw exception after retries.");
+      fail("Invoke should throw exception after retries.");
     } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains(
+      assertTrue(e.getMessage().contains(
           "Job ID doesnot Exist"));
     }
   }
 
-  @Test
-  public void testRetriesOnConnectionFailure() throws Exception {
-
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testRetriesOnConnectionFailure(boolean pIsAMReachableFromClient)
+      throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
     when(historyServerProxy.getJobReport(getJobReportRequest())).thenThrow(
         new RuntimeException("1")).thenThrow(new RuntimeException("2"))       
@@ -136,13 +141,16 @@ public class TestClientServiceDelegate {
         historyServerProxy, rm);
 
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
+    assertNotNull(jobStatus);
     verify(historyServerProxy, times(3)).getJobReport(
         any(GetJobReportRequest.class));
   }
 
-  @Test
-  public void testRetriesOnAMConnectionFailures() throws Exception {
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testRetriesOnAMConnectionFailures(boolean pIsAMReachableFromClient)
+      throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     if (!isAMReachableFromClient) {
       return;
     }
@@ -175,16 +183,19 @@ public class TestClientServiceDelegate {
 
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
 
-    Assert.assertNotNull(jobStatus);
+    assertNotNull(jobStatus);
     // assert maxClientRetry is not decremented.
-    Assert.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES,
+    assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES,
       MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES), clientServiceDelegate
       .getMaxClientRetry());
     verify(amProxy, times(5)).getJobReport(any(GetJobReportRequest.class));
   }
 
-  @Test
-  public void testNoRetryOnAMAuthorizationException() throws Exception {
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testNoRetryOnAMAuthorizationException(boolean pIsAMReachableFromClient)
+      throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     if (!isAMReachableFromClient) {
       return;
     }
@@ -213,27 +224,30 @@ public class TestClientServiceDelegate {
 
     try {
       clientServiceDelegate.getJobStatus(oldJobId);
-      Assert.fail("Exception should be thrown upon AuthorizationException");
+      fail("Exception should be thrown upon AuthorizationException");
     } catch (IOException e) {
-      Assert.assertEquals(AuthorizationException.class.getName() + ": Denied",
+      assertEquals(AuthorizationException.class.getName() + ": Denied",
           e.getMessage());
     }
 
     // assert maxClientRetry is not decremented.
-    Assert.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES,
+    assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES,
       MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES), clientServiceDelegate
       .getMaxClientRetry());
     verify(amProxy, times(1)).getJobReport(any(GetJobReportRequest.class));
   }
 
-  @Test
-  public void testHistoryServerNotConfigured() throws Exception {
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testHistoryServerNotConfigured(
+      boolean pIsAMReachableFromClient) throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     //RM doesn't have app report and job History Server is not configured
     ClientServiceDelegate clientServiceDelegate = getClientServiceDelegate(
         null, getRMDelegate());
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertEquals("N/A", jobStatus.getUsername());
-    Assert.assertEquals(JobStatus.State.PREP, jobStatus.getState());
+    assertEquals("N/A", jobStatus.getUsername());
+    assertEquals(JobStatus.State.PREP, jobStatus.getState());
 
     //RM has app report and job History Server is not configured
     ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class);
@@ -243,12 +257,15 @@ public class TestClientServiceDelegate {
 
     clientServiceDelegate = getClientServiceDelegate(null, rm);
     jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertEquals(applicationReport.getUser(), jobStatus.getUsername());
-    Assert.assertEquals(JobStatus.State.SUCCEEDED, jobStatus.getState());
+    assertEquals(applicationReport.getUser(), jobStatus.getUsername());
+    assertEquals(JobStatus.State.SUCCEEDED, jobStatus.getState());
   }
-  
-  @Test
-  public void testJobReportFromHistoryServer() throws Exception {                                 
+
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testJobReportFromHistoryServer(
+      boolean pIsAMReachableFromClient) throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);                           
     when(historyServerProxy.getJobReport(getJobReportRequest())).thenReturn(                      
         getJobReportResponseFromHistoryServer());                                                 
@@ -259,15 +276,18 @@ public class TestClientServiceDelegate {
         historyServerProxy, rm);
 
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
-    Assert.assertEquals("TestJobFilePath", jobStatus.getJobFile());                               
-    Assert.assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl());                    
-    Assert.assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f);
-    Assert.assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f);
+    assertNotNull(jobStatus);
+    assertEquals("TestJobFilePath", jobStatus.getJobFile());
+    assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl());
+    assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f);
+    assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f);
   }
-  
-  @Test
-  public void testCountersFromHistoryServer() throws Exception {                                 
+
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testCountersFromHistoryServer(
+      boolean pIsAMReachableFromClient) throws Exception {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);                           
     when(historyServerProxy.getCounters(getCountersRequest())).thenReturn(                      
         getCountersResponseFromHistoryServer());
@@ -278,12 +298,16 @@ public class TestClientServiceDelegate {
         historyServerProxy, rm);
 
     Counters counters = TypeConverter.toYarn(clientServiceDelegate.getJobCounters(oldJobId));
-    Assert.assertNotNull(counters);
-    Assert.assertEquals(1001, counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue());                               
+    assertNotNull(counters);
+    assertEquals(1001,
+        counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue());
   }
 
-  @Test
-  public void testReconnectOnAMRestart() throws IOException {
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testReconnectOnAMRestart(
+      boolean pIsAMReachableFromClient) throws IOException {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     //test not applicable when AM not reachable
     //as instantiateAMProxy is not called at all
     if(!isAMReachableFromClient) {
@@ -338,23 +362,26 @@ public class TestClientServiceDelegate {
         clientServiceDelegate).instantiateAMProxy(any(InetSocketAddress.class));
 
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
-    Assert.assertEquals("jobName-firstGen", jobStatus.getJobName());
+    assertNotNull(jobStatus);
+    assertEquals("jobName-firstGen", jobStatus.getJobName());
 
     jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
-    Assert.assertEquals("jobName-secondGen", jobStatus.getJobName());
+    assertNotNull(jobStatus);
+    assertEquals("jobName-secondGen", jobStatus.getJobName());
 
     jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
-    Assert.assertEquals("jobName-secondGen", jobStatus.getJobName());
+    assertNotNull(jobStatus);
+    assertEquals("jobName-secondGen", jobStatus.getJobName());
 
     verify(clientServiceDelegate, times(2)).instantiateAMProxy(
         any(InetSocketAddress.class));
   }
-  
-  @Test
-  public void testAMAccessDisabled() throws IOException {
+
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testAMAccessDisabled(
+      boolean pIsAMReachableFromClient) throws IOException {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     //test only applicable when AM not reachable
     if(isAMReachableFromClient) {
       return;
@@ -379,56 +406,65 @@ public class TestClientServiceDelegate {
         historyServerProxy, rmDelegate));
 
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
-    Assert.assertEquals("N/A", jobStatus.getJobName());
+    assertNotNull(jobStatus);
+    assertEquals("N/A", jobStatus.getJobName());
     
     verify(clientServiceDelegate, times(0)).instantiateAMProxy(
         any(InetSocketAddress.class));
 
     // Should not reach AM even for second and third times too.
     jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
-    Assert.assertEquals("N/A", jobStatus.getJobName());    
+    assertNotNull(jobStatus);
+    assertEquals("N/A", jobStatus.getJobName());
     verify(clientServiceDelegate, times(0)).instantiateAMProxy(
         any(InetSocketAddress.class));
     jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus);
-    Assert.assertEquals("N/A", jobStatus.getJobName());    
+    assertNotNull(jobStatus);
+    assertEquals("N/A", jobStatus.getJobName());
     verify(clientServiceDelegate, times(0)).instantiateAMProxy(
         any(InetSocketAddress.class));
 
     // The third time around, app is completed, so should go to JHS
     JobStatus jobStatus1 = clientServiceDelegate.getJobStatus(oldJobId);
-    Assert.assertNotNull(jobStatus1);
-    Assert.assertEquals("TestJobFilePath", jobStatus1.getJobFile());                               
-    Assert.assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl());                    
-    Assert.assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f);
-    Assert.assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f);
+    assertNotNull(jobStatus1);
+    assertEquals("TestJobFilePath", jobStatus1.getJobFile());
+    assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl());
+    assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f);
+    assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f);
     
     verify(clientServiceDelegate, times(0)).instantiateAMProxy(
         any(InetSocketAddress.class));
   }
-  
-  @Test
-  public void testRMDownForJobStatusBeforeGetAMReport() throws IOException {
+
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testRMDownForJobStatusBeforeGetAMReport(
+      boolean pIsAMReachableFromClient) throws IOException {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     Configuration conf = new YarnConfiguration();
     testRMDownForJobStatusBeforeGetAMReport(conf,
         MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES);
   }
 
-  @Test
-  public void testRMDownForJobStatusBeforeGetAMReportWithRetryTimes()
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testRMDownForJobStatusBeforeGetAMReportWithRetryTimes(
+      boolean pIsAMReachableFromClient)
       throws IOException {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     Configuration conf = new YarnConfiguration();
     conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 2);
     testRMDownForJobStatusBeforeGetAMReport(conf, conf.getInt(
         MRJobConfig.MR_CLIENT_MAX_RETRIES,
         MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES));
   }
-  
-  @Test
-  public void testRMDownRestoreForJobStatusBeforeGetAMReport()
+
+  @MethodSource("data")
+  @ParameterizedTest
+  public void testRMDownRestoreForJobStatusBeforeGetAMReport(
+      boolean pIsAMReachableFromClient)
       throws IOException {
+    initTestClientServiceDelegate(pIsAMReachableFromClient);
     Configuration conf = new YarnConfiguration();
     conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 3);
 
@@ -451,7 +487,7 @@ public class TestClientServiceDelegate {
       JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
       verify(rmDelegate, times(3)).getApplicationReport(
           any(ApplicationId.class));
-      Assert.assertNotNull(jobStatus);
+      assertNotNull(jobStatus);
     } catch (YarnException e) {
       throw new IOException(e);
     }
@@ -476,7 +512,7 @@ public class TestClientServiceDelegate {
           conf, rmDelegate, oldJobId, historyServerProxy);
       try {
         clientServiceDelegate.getJobStatus(oldJobId);
-        Assert.fail("It should throw exception after retries");
+        fail("It should throw exception after retries");
       } catch (IOException e) {
         System.out.println("fail to get job status,and e=" + e.toString());
       }

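The TestClientServiceDelegate hunks above show the recurring recipe for migrating a JUnit 4 class-level @RunWith(Parameterized.class) suite: each method becomes a @ParameterizedTest fed by @MethodSource("data"), and the parameter is applied through an explicit init call, since JUnit Jupiter does not inject parameters through a test-class constructor. A minimal sketch of the idiom, with hypothetical class and method names:

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.Arrays;
import java.util.Collection;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

public class ParameterizedMigrationSketch {

  // Formerly a constructor-injected field under @RunWith(Parameterized.class).
  private boolean isAMReachableFromClient;

  // Same factory shape JUnit 4 used with @Parameters; each Object[] row
  // supplies the arguments for one invocation.
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][] {{true}, {false}});
  }

  // Replaces the JUnit 4 constructor: every test invokes it first.
  private void initSketch(boolean pIsAMReachableFromClient) {
    this.isAMReachableFromClient = pIsAMReachableFromClient;
  }

  @MethodSource("data")
  @ParameterizedTest
  public void testFieldIsApplied(boolean pIsAMReachableFromClient) {
    initSketch(pIsAMReachableFromClient);
    assertEquals(pIsAMReachableFromClient, isAMReachableFromClient);
  }
}
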
+ 7 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java

@@ -30,16 +30,16 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
 
-  @BeforeClass
+  @BeforeAll
   public static void setupClass() throws Exception {
     setupClassBase(TestClusterMapReduceTestCase.class);
   }

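TestClusterMapReduceTestCase shows the lifecycle half of the migration. The annotation mapping is one-to-one, and the class-level hooks must stay static; a minimal sketch with illustrative method names:

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

public class LifecycleSketch {

  @BeforeAll    // was @BeforeClass; still required to be static
  public static void setupClass() {
  }

  @BeforeEach   // was @Before; runs before every test method
  public void setUp() {
  }

  @Test
  public void testSomething() {
  }

  @AfterEach    // was @After
  public void tearDown() {
  }

  @AfterAll     // was @AfterClass; still required to be static
  public static void tearDownClass() {
  }
}
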
+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java

@@ -21,7 +21,7 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.*;
 import java.util.*;

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java

@@ -26,11 +26,11 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapred.lib.CombineFileRecordReader;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestCombineFileInputFormat {
   private static final Logger LOG =
@@ -79,6 +79,6 @@ public class TestCombineFileInputFormat {
     LOG.info("Trying to getSplits with splits = " + SIZE_SPLITS);
     InputSplit[] splits = format.getSplits(job, SIZE_SPLITS);
     LOG.info("Got getSplits = " + splits.length);
-    assertEquals("splits == " + SIZE_SPLITS, SIZE_SPLITS, splits.length);
+    assertEquals(SIZE_SPLITS, splits.length, "splits == " + SIZE_SPLITS);
   }
 }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineOutputCollector.java

@@ -32,7 +32,7 @@ import org.apache.hadoop.mapred.IFile.Writer;
 import org.apache.hadoop.mapred.Task.CombineOutputCollector;
 import org.apache.hadoop.mapred.Task.TaskReporter;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestCombineOutputCollector {
   private CombineOutputCollector<String, Integer> coc;

+ 11 - 9
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 import java.io.IOException;
 import java.util.BitSet;
@@ -33,7 +33,8 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -57,7 +58,8 @@ public class TestCombineSequenceFileInputFormat {
       System.getProperty("test.build.data", "/tmp"),
       "TestCombineSequenceFileInputFormat"));
 
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testFormat() throws Exception {
     JobConf job = new JobConf(conf);
 
@@ -92,10 +94,10 @@ public class TestCombineSequenceFileInputFormat {
 
       // we should have a single split as the length is comfortably smaller than
       // the block size
-      assertEquals("We got more than one splits!", 1, splits.length);
+      assertEquals(1, splits.length, "We got more than one splits!");
       InputSplit split = splits[0];
-      assertEquals("It should be CombineFileSplit",
-        CombineFileSplit.class, split.getClass());
+      assertEquals(CombineFileSplit.class, split.getClass(),
+          "It should be CombineFileSplit");
 
       // check each split
       BitSet bits = new BitSet(length);
@@ -103,13 +105,13 @@ public class TestCombineSequenceFileInputFormat {
         format.getRecordReader(split, job, reporter);
       try {
         while (reader.next(key, value)) {
-          assertFalse("Key in multiple partitions.", bits.get(key.get()));
+          assertFalse(bits.get(key.get()), "Key in multiple partitions.");
           bits.set(key.get());
         }
       } finally {
         reader.close();
       }
-      assertEquals("Some keys in no partition.", length, bits.cardinality());
+      assertEquals(length, bits.cardinality(), "Some keys in no partition.");
     }
   }
 

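The timeout rewrite in TestCombineSequenceFileInputFormat is more than a syntax swap: JUnit 4's @Test(timeout=10000) counts milliseconds, while Jupiter's @Timeout defaults to seconds, which is why 10000 becomes value = 10. A sketch of both spellings (method names are illustrative):

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

public class TimeoutSketch {

  // JUnit 4 equivalent: @Test(timeout = 10000), i.e. 10000 milliseconds.
  @Test
  @Timeout(value = 10)  // Jupiter's default unit is SECONDS
  public void finishesWithinTenSeconds() throws InterruptedException {
    Thread.sleep(100);  // comfortably under the limit
  }

  // Sub-second limits need the unit spelled out.
  @Test
  @Timeout(value = 500, unit = TimeUnit.MILLISECONDS)
  public void finishesWithinHalfASecond() {
  }
}
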
+ 18 - 15
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java

@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -40,7 +40,8 @@ import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapred.lib.CombineTextInputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -67,7 +68,8 @@ public class TestCombineTextInputFormat {
   // A reporter that does nothing
   private static final Reporter voidReporter = Reporter.NULL;
 
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testFormat() throws Exception {
     JobConf job = new JobConf(defaultConf);
 
@@ -96,10 +98,10 @@ public class TestCombineTextInputFormat {
 
       // we should have a single split as the length is comfortably smaller than
       // the block size
-      assertEquals("We got more than one splits!", 1, splits.length);
+      assertEquals(1, splits.length, "We got more than one splits!");
       InputSplit split = splits[0];
-      assertEquals("It should be CombineFileSplit",
-        CombineFileSplit.class, split.getClass());
+      assertEquals(CombineFileSplit.class, split.getClass(),
+          "It should be CombineFileSplit");
 
       // check the split
       BitSet bits = new BitSet(length);
@@ -115,7 +117,7 @@ public class TestCombineTextInputFormat {
             LOG.warn("conflict with " + v +
                      " at position "+reader.getPos());
           }
-          assertFalse("Key in multiple partitions.", bits.get(v));
+          assertFalse(bits.get(v), "Key in multiple partitions.");
           bits.set(v);
           count++;
         }
@@ -123,7 +125,7 @@ public class TestCombineTextInputFormat {
       } finally {
         reader.close();
       }
-      assertEquals("Some keys in no partition.", length, bits.cardinality());
+      assertEquals(length, bits.cardinality(), "Some keys in no partition.");
     }
   }
 
@@ -206,7 +208,8 @@ public class TestCombineTextInputFormat {
   /**
    * Test using the gzip codec for reading
    */
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testGzip() throws IOException {
     JobConf job = new JobConf(defaultConf);
     CompressionCodec gzip = new GzipCodec();
@@ -219,9 +222,9 @@ public class TestCombineTextInputFormat {
     FileInputFormat.setInputPaths(job, workDir);
     CombineTextInputFormat format = new CombineTextInputFormat();
     InputSplit[] splits = format.getSplits(job, 100);
-    assertEquals("compressed splits == 1", 1, splits.length);
+    assertEquals(1, splits.length, "compressed splits == 1");
     List<Text> results = readSplit(format, splits[0], job);
-    assertEquals("splits[0] length", 8, results.size());
+    assertEquals(8, results.size(), "splits[0] length");
 
     final String[] firstList =
       {"the quick", "brown", "fox jumped", "over", " the lazy", " dog"};
@@ -239,11 +242,11 @@ public class TestCombineTextInputFormat {
   private static void testResults(List<Text> results, String[] first,
     String[] second) {
     for (int i = 0; i < first.length; i++) {
-      assertEquals("splits[0]["+i+"]", first[i], results.get(i).toString());
+      assertEquals(first[i], results.get(i).toString(), "splits[0][" + i + "]");
     }
     for (int i = 0; i < second.length; i++) {
       int j = i + first.length;
-      assertEquals("splits[0]["+j+"]", second[i], results.get(j).toString());
+      assertEquals(second[i], results.get(j).toString(), "splits[0][" + j + "]");
     }
   }
 }

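Most of the assertion churn in files like TestCombineTextInputFormat comes down to one rule: JUnit 4 takes the failure message as the first argument, Jupiter takes it last, and the expected/actual order is unchanged. Jupiter additionally accepts a Supplier<String>, so expensive messages are built only on failure; a small sketch:

import static org.junit.jupiter.api.Assertions.assertEquals;

public class MessageOrderSketch {

  public void check(int actualSize) {
    // JUnit 4: assertEquals("splits[0] length", 8, actualSize);
    // Jupiter: same expected-then-actual order, message moves to the end.
    assertEquals(8, actualSize, "splits[0] length");

    // Lazy variant: the string is concatenated only if the assertion fails.
    assertEquals(8, actualSize, () -> "splits[0] length, got " + actualSize);
  }
}
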
+ 8 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java

@@ -26,15 +26,15 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * check for the job submission  options of 
  * -libjars -files -archives
  */
-@Ignore
+@Disabled
 public class TestCommandLineJobSubmission {
   // Input output paths for this..
   // these are all dummy and does not test
@@ -59,7 +59,7 @@ public class TestCommandLineJobSubmission {
       stream.close();
       mr = new MiniMRCluster(2, fs.getUri().toString(), 1);
       File thisbuildDir = new File(buildDir, "jobCommand");
-      assertTrue("create build dir", thisbuildDir.mkdirs()); 
+      assertTrue(thisbuildDir.mkdirs(), "create build dir");
       File f = new File(thisbuildDir, "files_tmp");
       FileOutputStream fstream = new FileOutputStream(f);
       fstream.write("somestrings".getBytes());
@@ -120,13 +120,13 @@ public class TestCommandLineJobSubmission {
       
       JobConf jobConf = mr.createJobConf();
       //before running the job, verify that libjar is not in client classpath
-      assertTrue("libjar not in client classpath", loadLibJar(jobConf)==null);
+      assertTrue(loadLibJar(jobConf)==null, "libjar not in client classpath");
       int ret = ToolRunner.run(jobConf,
                                new testshell.ExternalMapReduce(), args);
       //after running the job, verify that libjar is in the client classpath
-      assertTrue("libjar added to client classpath", loadLibJar(jobConf)!=null);
+      assertTrue(loadLibJar(jobConf)!=null, "libjar added to client classpath");
       
-      assertTrue("not failed ", ret != -1);
+      assertTrue(ret != -1, "not failed ");
       f.delete();
       thisbuildDir.delete();
     } finally {

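@Ignore maps directly onto Jupiter's @Disabled at either class or method scope, and both accept an optional reason string; a sketch (the reason text is illustrative):

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

@Disabled  // was @Ignore: skips every test in the class
public class DisabledSketch {

  @Disabled("a reason string is optional and shows up in test reports")
  @Test
  public void skippedTest() {
  }
}
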
+ 7 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestComparators.java

@@ -35,11 +35,11 @@ import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.MRConfig;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 
 /**
@@ -312,7 +312,7 @@ public class TestComparators {
     }
   }
 
-  @Before
+  @BeforeEach
   public void configure() throws Exception {
     Path testdir = new Path(TEST_DIR.getAbsolutePath());
     Path inDir = new Path(testdir, "in");
@@ -355,7 +355,7 @@ public class TestComparators {
     jc = new JobClient(conf);
   }
 
-  @After
+  @AfterEach
   public void cleanup() {
     FileUtil.fullyDelete(TEST_DIR);
   }

+ 64 - 79
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java

@@ -29,8 +29,8 @@ import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,7 +43,9 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.zip.Inflater;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * Test class for concatenated {@link CompressionInputStream}.
@@ -80,7 +82,7 @@ public class TestConcatenatedCompressedInput {
     }
   }
 
-  @After
+  @AfterEach
   public void after() {
     ZlibFactory.loadNativeZLib();
   }
@@ -203,7 +205,7 @@ public class TestConcatenatedCompressedInput {
     format.configure(jobConf);
 
     InputSplit[] splits = format.getSplits(jobConf, 100);
-    assertEquals("compressed splits == 2", 2, splits.length);
+    assertEquals(2, splits.length, "compressed splits == 2");
     FileSplit tmp = (FileSplit) splits[0];
     if (tmp.getPath().getName().equals("part2.txt.gz")) {
       splits[0] = splits[1];
@@ -211,16 +213,13 @@ public class TestConcatenatedCompressedInput {
     }
 
     List<Text> results = readSplit(format, splits[0], jobConf);
-    assertEquals("splits[0] num lines", 6, results.size());
-    assertEquals("splits[0][5]", "member #3",
-                 results.get(5).toString());
+    assertEquals(6, results.size(), "splits[0] num lines");
+    assertEquals("member #3", results.get(5).toString(), "splits[0][5]");
 
     results = readSplit(format, splits[1], jobConf);
-    assertEquals("splits[1] num lines", 2, results.size());
-    assertEquals("splits[1][0]", "this is a test",
-                 results.get(0).toString());
-    assertEquals("splits[1][1]", "of gzip",
-                 results.get(1).toString());
+    assertEquals(2, results.size(), "splits[1] num lines");
+    assertEquals("this is a test", results.get(0).toString(), "splits[1][0]");
+    assertEquals("of gzip", results.get(1).toString(), "splits[1][1]");
   }
 
   /**
@@ -243,43 +242,41 @@ public class TestConcatenatedCompressedInput {
     localFs.copyFromLocalFile(fnLocal, fnHDFS);
 
     final FileInputStream in = new FileInputStream(fnLocal.toString());
-    assertEquals("concat bytes available", 148, in.available());
+    assertEquals(148, in.available(), "concat bytes available");
 
     // should wrap all of this header-reading stuff in a running-CRC wrapper
     // (did so in BuiltInGzipDecompressor; see below)
 
     byte[] compressedBuf = new byte[256];
     int numBytesRead = in.read(compressedBuf, 0, 10);
-    assertEquals("header bytes read", 10, numBytesRead);
-    assertEquals("1st byte", 0x1f, compressedBuf[0] & 0xff);
-    assertEquals("2nd byte", 0x8b, compressedBuf[1] & 0xff);
-    assertEquals("3rd byte (compression method)", 8, compressedBuf[2] & 0xff);
+    assertEquals(10, numBytesRead, "header bytes read");
+    assertEquals(0x1f, compressedBuf[0] & 0xff, "1st byte");
+    assertEquals(0x8b, compressedBuf[1] & 0xff, "2nd byte");
+    assertEquals(8, compressedBuf[2] & 0xff, "3rd byte (compression method)");
 
     byte flags = (byte)(compressedBuf[3] & 0xff);
     if ((flags & 0x04) != 0) {   // FEXTRA
       numBytesRead = in.read(compressedBuf, 0, 2);
-      assertEquals("XLEN bytes read", 2, numBytesRead);
+      assertEquals(2, numBytesRead, "XLEN bytes read");
       int xlen = ((compressedBuf[1] << 8) | compressedBuf[0]) & 0xffff;
       in.skip(xlen);
     }
     if ((flags & 0x08) != 0) {   // FNAME
       while ((numBytesRead = in.read()) != 0) {
-        assertFalse("unexpected end-of-file while reading filename",
-                    numBytesRead == -1);
+        assertFalse(numBytesRead == -1, "unexpected end-of-file while reading filename");
       }
     }
     if ((flags & 0x10) != 0) {   // FCOMMENT
       while ((numBytesRead = in.read()) != 0) {
-        assertFalse("unexpected end-of-file while reading comment",
-                    numBytesRead == -1);
+        assertFalse(numBytesRead == -1, "unexpected end-of-file while reading comment");
       }
     }
     if ((flags & 0xe0) != 0) {   // reserved
-      assertTrue("reserved bits are set??", (flags & 0xe0) == 0);
+      assertTrue((flags & 0xe0) == 0, "reserved bits are set??");
     }
     if ((flags & 0x02) != 0) {   // FHCRC
       numBytesRead = in.read(compressedBuf, 0, 2);
-      assertEquals("CRC16 bytes read", 2, numBytesRead);
+      assertEquals(2, numBytesRead, "CRC16 bytes read");
       int crc16 = ((compressedBuf[1] << 8) | compressedBuf[0]) & 0xffff;
     }
 
@@ -320,9 +317,8 @@ public class TestConcatenatedCompressedInput {
     localFs.delete(workDir, true);
     // Don't use native libs for this test
     ZlibFactory.setNativeZlibLoaded(false);
-    assertEquals("[non-native (Java) codec]",
-      org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class,
-      gzip.getDecompressorType());
+    assertEquals(org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class,
+        gzip.getDecompressorType(), "[non-native (Java) codec]");
     System.out.println(COLOR_BR_YELLOW + "testBuiltInGzipDecompressor() using" +
       " non-native (Java Inflater) Decompressor (" + gzip.getDecompressorType()
       + ")" + COLOR_NORMAL);
@@ -347,8 +343,8 @@ public class TestConcatenatedCompressedInput {
     // here's first pair of DecompressorStreams:
     final FileInputStream in1 = new FileInputStream(fnLocal1.toString());
     final FileInputStream in2 = new FileInputStream(fnLocal2.toString());
-    assertEquals("concat bytes available", 2734, in1.available());
-    assertEquals("concat bytes available", 3413, in2.available()); // w/hdr CRC
+    assertEquals(2734, in1.available(), "concat bytes available");
+    assertEquals(3413, in2.available(), "concat bytes available"); // w/hdr CRC
 
     CompressionInputStream cin2 = gzip.createInputStream(in2);
     LineReader in = new LineReader(cin2);
@@ -360,10 +356,10 @@ public class TestConcatenatedCompressedInput {
       totalBytes += numBytes;
     }
     in.close();
-    assertEquals("total uncompressed bytes in concatenated test file",
-                 5346, totalBytes);
-    assertEquals("total uncompressed lines in concatenated test file",
-                 84, lineNum);
+    assertEquals(5346, totalBytes,
+        "total uncompressed bytes in concatenated test file");
+    assertEquals(84, lineNum,
+        "total uncompressed lines in concatenated test file");
 
     ZlibFactory.loadNativeZLib();
     // test GzipZlibDecompressor (native), just to be sure
@@ -442,7 +438,7 @@ public class TestConcatenatedCompressedInput {
 
     // here's Nth pair of DecompressorStreams:
     InputSplit[] splits = format.getSplits(jConf, 100);
-    assertEquals("compressed splits == 2", 2, splits.length);
+    assertEquals(2, splits.length, "compressed splits == 2");
     FileSplit tmp = (FileSplit) splits[0];
     if (tmp.getPath()
             .getName().equals("testdata/testCompressThenConcat.txt.gz")) {
@@ -452,22 +448,18 @@ public class TestConcatenatedCompressedInput {
     }
 
     List<Text> results = readSplit(format, splits[0], jConf);
-    assertEquals("splits[0] length (num lines)", 84, results.size());
-    assertEquals("splits[0][0]",
-      "Call me Ishmael. Some years ago--never mind how long precisely--having",
-      results.get(0).toString());
-    assertEquals("splits[0][42]",
-      "Tell me, does the magnetic virtue of the needles of the compasses of",
-      results.get(42).toString());
+    assertEquals(84, results.size(), "splits[0] length (num lines)");
+    assertEquals("Call me Ishmael. Some years ago--never mind how long precisely--having",
+        results.get(0).toString(), "splits[0][0]");
+    assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of",
+        results.get(42).toString(), "splits[0][42]");
 
     results = readSplit(format, splits[1], jConf);
-    assertEquals("splits[1] length (num lines)", 84, results.size());
-    assertEquals("splits[1][0]",
-      "Call me Ishmael. Some years ago--never mind how long precisely--having",
-      results.get(0).toString());
-    assertEquals("splits[1][42]",
-      "Tell me, does the magnetic virtue of the needles of the compasses of",
-      results.get(42).toString());
+    assertEquals(84, results.size(), "splits[1] length (num lines)");
+    assertEquals("Call me Ishmael. Some years ago--never mind how long precisely--having",
+        results.get(0).toString(), "splits[1][0]");
+    assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of",
+        results.get(42).toString(), "splits[1][42]");
   }
 
   /**
@@ -501,7 +493,7 @@ public class TestConcatenatedCompressedInput {
     // [135 splits for a 208-byte file and a 62-byte file(!)]
 
     InputSplit[] splits = format.getSplits(jobConf, 100);
-    assertEquals("compressed splits == 2", 2, splits.length);
+    assertEquals(2, splits.length, "compressed splits == 2");
     FileSplit tmp = (FileSplit) splits[0];
     if (tmp.getPath().getName().equals("part2.txt.bz2")) {
       splits[0] = splits[1];
@@ -509,16 +501,13 @@ public class TestConcatenatedCompressedInput {
     }
 
     List<Text> results = readSplit(format, splits[0], jobConf);
-    assertEquals("splits[0] num lines", 6, results.size());
-    assertEquals("splits[0][5]", "member #3",
-                 results.get(5).toString());
+    assertEquals(6, results.size(), "splits[0] num lines");
+    assertEquals("member #3", results.get(5).toString(), "splits[0][5]");
 
     results = readSplit(format, splits[1], jobConf);
-    assertEquals("splits[1] num lines", 2, results.size());
-    assertEquals("splits[1][0]", "this is a test",
-                 results.get(0).toString());
-    assertEquals("splits[1][1]", "of bzip2",
-                 results.get(1).toString());
+    assertEquals(2, results.size(), "splits[1] num lines");
+    assertEquals("this is a test", results.get(0).toString(), "splits[1][0]");
+    assertEquals("of bzip2", results.get(1).toString(), "splits[1][1]");
   }
 
   /**
@@ -555,8 +544,8 @@ public class TestConcatenatedCompressedInput {
     // here's first pair of BlockDecompressorStreams:
     final FileInputStream in1 = new FileInputStream(fnLocal1.toString());
     final FileInputStream in2 = new FileInputStream(fnLocal2.toString());
-    assertEquals("concat bytes available", 2567, in1.available());
-    assertEquals("concat bytes available", 3056, in2.available());
+    assertEquals(2567, in1.available(), "concat bytes available");
+    assertEquals(3056, in2.available(), "concat bytes available");
 
     CompressionInputStream cin2 = bzip2.createInputStream(in2);
     LineReader in = new LineReader(cin2);
@@ -568,10 +557,10 @@ public class TestConcatenatedCompressedInput {
       totalBytes += numBytes;
     }
     in.close();
-    assertEquals("total uncompressed bytes in concatenated test file",
-                 5346, totalBytes);
-    assertEquals("total uncompressed lines in concatenated test file",
-                 84, lineNum);
+    assertEquals(5346, totalBytes,
+        "total uncompressed bytes in concatenated test file");
+    assertEquals(84, lineNum,
+        "total uncompressed lines in concatenated test file");
 
     // test CBZip2InputStream with lots of different input-buffer sizes
     doMultipleBzip2BufferSizes(jobConf);
@@ -646,7 +635,7 @@ public class TestConcatenatedCompressedInput {
 
     // here's Nth pair of DecompressorStreams:
     InputSplit[] splits = format.getSplits(jConf, 100);
-    assertEquals("compressed splits == 2", 2, splits.length);
+    assertEquals(2, splits.length, "compressed splits == 2");
     FileSplit tmp = (FileSplit) splits[0];
     if (tmp.getPath()
             .getName().equals("testdata/testCompressThenConcat.txt.gz")) {
@@ -657,23 +646,19 @@ public class TestConcatenatedCompressedInput {
 
     // testConcatThenCompress (single)
     List<Text> results = readSplit(format, splits[0], jConf);
-    assertEquals("splits[0] length (num lines)", 84, results.size());
-    assertEquals("splits[0][0]",
-      "Call me Ishmael. Some years ago--never mind how long precisely--having",
-      results.get(0).toString());
-    assertEquals("splits[0][42]",
-      "Tell me, does the magnetic virtue of the needles of the compasses of",
-      results.get(42).toString());
+    assertEquals(84, results.size(), "splits[0] length (num lines)");
+    assertEquals("Call me Ishmael. Some years ago--never mind how long precisely--having",
+        results.get(0).toString(), "splits[0][0]");
+    assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of",
+        results.get(42).toString(), "splits[0][42]");
 
     // testCompressThenConcat (multi)
     results = readSplit(format, splits[1], jConf);
-    assertEquals("splits[1] length (num lines)", 84, results.size());
-    assertEquals("splits[1][0]",
-      "Call me Ishmael. Some years ago--never mind how long precisely--having",
-      results.get(0).toString());
-    assertEquals("splits[1][42]",
-      "Tell me, does the magnetic virtue of the needles of the compasses of",
-      results.get(42).toString());
+    assertEquals(84, results.size(), "splits[1] length (num lines)");
+    assertEquals("Call me Ishmael. Some years ago--never mind how long precisely--having",
+        results.get(0).toString(), "splits[1][0]");
+    assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of",
+        results.get(42).toString(), "splits[1][42]");
   }
 
   private static String unquote(String in) {

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java

@@ -23,8 +23,8 @@ import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper;
 import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;

+ 6 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java

@@ -21,10 +21,10 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.IOException;
 import java.io.Writer;
@@ -57,12 +57,12 @@ public class TestFileInputFormatPathFilter {
       new Path(new Path(System.getProperty("test.build.data", "."), "data"),
           "TestFileInputFormatPathFilter");
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     tearDown();
     localFs.mkdirs(workDir);
   }
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     if (localFs.exists(workDir)) {
       localFs.delete(workDir, true);

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java

@@ -30,8 +30,8 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Iterator;
 
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestFileOutputFormat extends HadoopTestCase {
 

+ 44 - 34
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java

@@ -33,12 +33,14 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestFixedLengthInputFormat {
 
@@ -54,7 +56,7 @@ public class TestFixedLengthInputFormat {
   private static char[] chars;
   private static Random charRand;
 
-  @BeforeClass
+  @BeforeAll
   public static void onlyOnce() {
     try {
       defaultConf = new Configuration();
@@ -77,7 +79,8 @@ public class TestFixedLengthInputFormat {
    * 20 random tests of various record, file, and split sizes.  All tests have
    * uncompressed file as input.
    */
-  @Test (timeout=500000)
+  @Test
+  @Timeout(value = 500)
   public void testFormat() throws IOException {
     runRandomTests(null);
   }
@@ -86,7 +89,8 @@ public class TestFixedLengthInputFormat {
    * 20 random tests of various record, file, and split sizes.  All tests have
    * compressed file as input.
    */
-  @Test (timeout=500000)
+  @Test
+  @Timeout(value = 500)
   public void testFormatCompressedIn() throws IOException {
     runRandomTests(new GzipCodec());
   }
@@ -94,7 +98,8 @@ public class TestFixedLengthInputFormat {
   /**
    * Test with no record length set.
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testNoRecordLength() throws IOException {
     localFs.delete(workDir, true);
     Path file = new Path(workDir, "testFormat.txt");
@@ -115,13 +120,14 @@ public class TestFixedLengthInputFormat {
         LOG.info("Exception message:" + ioe.getMessage());
       }
     }
-    assertTrue("Exception for not setting record length:", exceptionThrown);
+    assertTrue(exceptionThrown, "Exception for not setting record length:");
   }
 
   /**
    * Test with record length set to 0
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testZeroRecordLength() throws IOException {
     localFs.delete(workDir, true);
     Path file = new Path(workDir, "testFormat.txt");
@@ -143,13 +149,14 @@ public class TestFixedLengthInputFormat {
         LOG.info("Exception message:" + ioe.getMessage());
       }
     }
-    assertTrue("Exception for zero record length:", exceptionThrown);
+    assertTrue(exceptionThrown, "Exception for zero record length:");
   }
 
   /**
    * Test with record length set to a negative value
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testNegativeRecordLength() throws IOException {
     localFs.delete(workDir, true);
     Path file = new Path(workDir, "testFormat.txt");
@@ -171,13 +178,14 @@ public class TestFixedLengthInputFormat {
         LOG.info("Exception message:" + ioe.getMessage());
       }
     }
-    assertTrue("Exception for negative record length:", exceptionThrown);
+    assertTrue(exceptionThrown, "Exception for negative record length:");
   }
 
   /**
    * Test with partial record at the end of a compressed input file.
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testPartialRecordCompressedIn() throws IOException {
     CompressionCodec gzip = new GzipCodec();
     runPartialRecordTest(gzip);
@@ -186,7 +194,8 @@ public class TestFixedLengthInputFormat {
   /**
    * Test with partial record at the end of an uncompressed input file.
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testPartialRecordUncompressedIn() throws IOException {
     runPartialRecordTest(null);
   }
@@ -194,7 +203,8 @@ public class TestFixedLengthInputFormat {
   /**
    * Test using the gzip codec with two input files.
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testGzipWithTwoInputs() throws IOException {
     CompressionCodec gzip = new GzipCodec();
     localFs.delete(workDir, true);
@@ -210,19 +220,19 @@ public class TestFixedLengthInputFormat {
     writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
         "ten  nine eightsevensix  five four threetwo  one  ");
     InputSplit[] splits = format.getSplits(job, 100);
-    assertEquals("compressed splits == 2", 2, splits.length);
+    assertEquals(2, splits.length, "compressed splits == 2");
     FileSplit tmp = (FileSplit) splits[0];
     if (tmp.getPath().getName().equals("part2.txt.gz")) {
       splits[0] = splits[1];
       splits[1] = tmp;
     }
     List<String> results = readSplit(format, splits[0], job);
-    assertEquals("splits[0] length", 10, results.size());
-    assertEquals("splits[0][5]", "six  ", results.get(5));
+    assertEquals(10, results.size(), "splits[0] length");
+    assertEquals("six  ", results.get(5), "splits[0][5]");
     results = readSplit(format, splits[1], job);
-    assertEquals("splits[1] length", 10, results.size());
-    assertEquals("splits[1][0]", "ten  ", results.get(0));
-    assertEquals("splits[1][1]", "nine ", results.get(1));
+    assertEquals(10, results.size(), "splits[1] length");
+    assertEquals("ten  ", results.get(0), "splits[1][0]");
+    assertEquals("nine ", results.get(1), "splits[1][1]");
   }
 
   // Create a file containing fixed length records with random data
@@ -329,26 +339,26 @@ public class TestFixedLengthInputFormat {
         RecordReader<LongWritable, BytesWritable> reader = 
             format.getRecordReader(split, job, voidReporter);
         Class<?> clazz = reader.getClass();
-        assertEquals("RecordReader class should be FixedLengthRecordReader:", 
-            FixedLengthRecordReader.class, clazz);
+        assertEquals(FixedLengthRecordReader.class, clazz,
+            "RecordReader class should be FixedLengthRecordReader:");
         // Plow through the records in this split
         while (reader.next(key, value)) {
-          assertEquals("Checking key", (long)(recordNumber*recordLength),
-              key.get());
+          assertEquals((long)(recordNumber*recordLength),
+              key.get(), "Checking key");
           String valueString =
               new String(value.getBytes(), 0, value.getLength());
-          assertEquals("Checking record length:", recordLength,
-              value.getLength());
-          assertTrue("Checking for more records than expected:",
-              recordNumber < totalRecords);
+          assertEquals(recordLength,
+              value.getLength(), "Checking record length:");
+          assertTrue(recordNumber < totalRecords,
+              "Checking for more records than expected:");
           String origRecord = recordList.get(recordNumber);
-          assertEquals("Checking record content:", origRecord, valueString);
+          assertEquals(origRecord, valueString, "Checking record content:");
           recordNumber++;
         }
         reader.close();
       }
-      assertEquals("Total original records should be total read records:",
-          recordList.size(), recordNumber);
+      assertEquals(recordList.size(), recordNumber,
+          "Total original records should be total read records:");
     }
   }
 
@@ -403,7 +413,7 @@ public class TestFixedLengthInputFormat {
             "one  two  threefour five six  seveneightnine ten");
     InputSplit[] splits = format.getSplits(job, 100);
     if (codec != null) {
-      assertEquals("compressed splits == 1", 1, splits.length);
+      assertEquals(1, splits.length, "compressed splits == 1");
     }
     boolean exceptionThrown = false;
     for (InputSplit split : splits) {
@@ -414,7 +424,7 @@ public class TestFixedLengthInputFormat {
         LOG.info("Exception message:" + ioe.getMessage());
       }
     }
-    assertTrue("Exception for partial record:", exceptionThrown);
+    assertTrue(exceptionThrown, "Exception for partial record:");
   }
 
 }

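TestFixedLengthInputFormat migrates JUnit 4's boolean-flag idiom for expected exceptions verbatim. As an aside, Jupiter's assertThrows can express the same check in one call; this sketch uses a hypothetical helper standing in for a record reader that fails when no record length is configured:

import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;

import org.junit.jupiter.api.Test;

public class AssertThrowsSketch {

  // Hypothetical stand-in for opening a reader with no record length set.
  private void openReaderWithoutRecordLength() throws IOException {
    throw new IOException("Invalid record length");
  }

  @Test
  public void missingRecordLengthRejected() {
    // One call replaces the try/catch plus exceptionThrown flag, and the
    // returned exception can still be inspected.
    IOException ioe = assertThrows(IOException.class,
        this::openReaderWithoutRecordLength);
    assertTrue(ioe.getMessage().contains("record length"));
  }
}
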
+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.net.NetworkTopology;
 
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestGetSplitHosts {
   @Test

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java

@@ -27,8 +27,9 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 public class TestIFile {
 
   @Test

+ 5 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java

@@ -21,15 +21,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.IOException;
 import java.io.OutputStream;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestIFileStreams {
   @Test
@@ -75,7 +75,7 @@ public class TestIFileStreams {
       }
       ifis.close();
     } catch (ChecksumException e) {
-      assertEquals("Unexpected bad checksum", DLEN - 1, i);
+      assertEquals(DLEN - 1, i, "Unexpected bad checksum");
       return;
     }
     fail("Did not detect bad data in checksum");
@@ -99,7 +99,7 @@ public class TestIFileStreams {
       }
       ifis.close();
     } catch (ChecksumException e) {
-      assertEquals("Checksum before close", i, DLEN - 8);
+      assertEquals(i, DLEN - 8, "Checksum before close");
       return;
     }
     fail("Did not detect bad data in checksum");

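One caveat visible in TestIFileStreams: relocating the message does not revisit argument order. Jupiter, like JUnit 4, reports the first value as "expected", so assertEquals(i, DLEN - 8, ...) presents the loop variable as the expectation. Where the intent is clear, keeping the constant first yields a readable failure; a sketch with an illustrative variable name:

import static org.junit.jupiter.api.Assertions.assertEquals;

public class ExpectedActualSketch {

  public void check(int bytesReadBeforeFailure) {
    final int DLEN = 100;
    // Constant first, measured value second, so a failure reads
    // "expected: <92> but was: <...>".
    assertEquals(DLEN - 8, bytesReadBeforeFailure, "Checksum before close");
  }
}
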
+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java

@@ -21,8 +21,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestInputPath {
   @Test

+ 14 - 15
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java

@@ -35,9 +35,9 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestJavaSerialization {
 
@@ -58,8 +58,8 @@ public class TestJavaSerialization {
       StringTokenizer st = new StringTokenizer(value.toString());
       while (st.hasMoreTokens()) {
         String token = st.nextToken();
-        assertTrue("Invalid token; expected 'a' or 'b', got " + token,
-          token.equals("a") || token.equals("b"));
+        assertTrue(token.equals("a") || token.equals("b"),
+            "Invalid token; expected 'a' or 'b', got " + token);
         output.collect(token, 1L);
       }
     }
@@ -124,9 +124,9 @@ public class TestJavaSerialization {
 
     String inputFileContents =
         FileUtils.readFileToString(new File(INPUT_FILE.toUri().getPath()));
-    assertTrue("Input file contents not as expected; contents are '"
-        + inputFileContents + "', expected \"b a\n\" ",
-      inputFileContents.equals("b a\n"));
+    assertTrue(inputFileContents.equals("b a\n"),
+        "Input file contents not as expected; contents are '"
+        + inputFileContents + "', expected \"b a\n\" ");
 
     JobClient.runJob(conf);
 
@@ -137,13 +137,12 @@ public class TestJavaSerialization {
     try (InputStream is = fs.open(outputFiles[0])) {
       String reduceOutput = org.apache.commons.io.IOUtils.toString(is, StandardCharsets.UTF_8);
       String[] lines = reduceOutput.split("\n");
-      assertEquals("Unexpected output; received output '" + reduceOutput + "'",
-          "a\t1", lines[0]);
-      assertEquals("Unexpected output; received output '" + reduceOutput + "'",
-          "b\t1", lines[1]);
-      assertEquals(
-          "Reduce output has extra lines; output is '" + reduceOutput + "'", 2,
-          lines.length);
+      assertEquals("a\t1", lines[0],
+          "Unexpected output; received output '" + reduceOutput + "'");
+      assertEquals("b\t1", lines[1],
+          "Unexpected output; received output '" + reduceOutput + "'");
+      assertEquals(2, lines.length,
+          "Reduce output has extra lines; output is '" + reduceOutput + "'");
     }
   }
 

+ 23 - 20
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java

@@ -30,13 +30,15 @@ import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * A JUnit test to test Map-Reduce job cleanup.
@@ -57,7 +59,7 @@ public class TestJobCleanup {
   private static final Logger LOG =
       LoggerFactory.getLogger(TestJobCleanup.class);
 
-  @BeforeClass
+  @BeforeAll
   public static void setUp() throws IOException {
     JobConf conf = new JobConf();
     fileSys = FileSystem.get(conf);
@@ -82,7 +84,7 @@ public class TestJobCleanup {
     fileSys.mkdirs(emptyInDir);
   }
 
-  @AfterClass
+  @AfterAll
   public static void tearDown() throws Exception {
     if (fileSys != null) {
       // fileSys.delete(new Path(TEST_ROOT_DIR), true);
@@ -169,14 +171,15 @@ public class TestJobCleanup {
 
     LOG.info("Job finished : " + job.isComplete());
     Path testFile = new Path(outDir, filename);
-    assertTrue("Done file \"" + testFile + "\" missing for job " + id,
-        fileSys.exists(testFile));
+    assertTrue(fileSys.exists(testFile),
+        "Done file \"" + testFile + "\" missing for job " + id);
 
     // check if the files from the missing set exists
     for (String ex : exclude) {
       Path file = new Path(outDir, ex);
-      assertFalse("File " + file + " should not be present for successful job "
-          + id, fileSys.exists(file));
+      assertFalse(fileSys.exists(file),
+          "File " + file + " should not be present for successful job "
+          + id);
     }
   }
 
@@ -196,19 +199,19 @@ public class TestJobCleanup {
     RunningJob job = jobClient.submitJob(jc);
     JobID id = job.getID();
     job.waitForCompletion();
-    assertEquals("Job did not fail", JobStatus.FAILED, job.getJobState());
+    assertEquals(JobStatus.FAILED, job.getJobState(), "Job did not fail");
 
     if (fileName != null) {
       Path testFile = new Path(outDir, fileName);
-      assertTrue("File " + testFile + " missing for failed job " + id,
-          fileSys.exists(testFile));
+      assertTrue(fileSys.exists(testFile),
+          "File " + testFile + " missing for failed job " + id);
     }
 
     // check if the files from the missing set exists
     for (String ex : exclude) {
       Path file = new Path(outDir, ex);
-      assertFalse("File " + file + " should not be present for failed job "
-          + id, fileSys.exists(file));
+      assertFalse(fileSys.exists(file),
+          "File " + file + " should not be present for failed job " + id);
     }
   }
 
@@ -242,19 +245,19 @@ public class TestJobCleanup {
     job.killJob(); // kill the job
 
     job.waitForCompletion(); // wait for the job to complete
-    assertEquals("Job was not killed", JobStatus.KILLED, job.getJobState());
+    assertEquals(JobStatus.KILLED, job.getJobState(), "Job was not killed");
 
     if (fileName != null) {
       Path testFile = new Path(outDir, fileName);
-      assertTrue("File " + testFile + " missing for job " + id,
-          fileSys.exists(testFile));
+      assertTrue(fileSys.exists(testFile),
+          "File " + testFile + " missing for job " + id);
     }
 
     // check if the files from the missing set exists
     for (String ex : exclude) {
       Path file = new Path(outDir, ex);
-      assertFalse("File " + file + " should not be present for killed job "
-          + id, fileSys.exists(file));
+      assertFalse(fileSys.exists(file),
+          "File " + file + " should not be present for killed job " + id);
     }
   }
 

+ 6 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java

@@ -19,9 +19,10 @@
 package org.apache.hadoop.mapred;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.isA;
 import static org.mockito.Mockito.atLeastOnce;
 import static org.mockito.Mockito.mock;
@@ -40,8 +41,7 @@ import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskReport;
 import org.apache.hadoop.mapreduce.TaskType;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 @SuppressWarnings("deprecation")
 public class TestJobClients {
@@ -189,7 +189,7 @@ public class TestJobClients {
     client.displayJobList(new JobStatus[] {mockJobStatus}, new PrintWriter(out));
     String commandLineOutput = out.toString();
     System.out.println(commandLineOutput);
-    Assert.assertTrue(commandLineOutput.contains("Total jobs:1"));
+    assertTrue(commandLineOutput.contains("Total jobs:1"));
 
     verify(mockJobStatus, atLeastOnce()).getJobID();
     verify(mockJobStatus).getState();

+ 19 - 19
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java

@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -46,9 +46,9 @@ import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter;
 import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * This is an wordcount application that tests the count of records
@@ -179,7 +179,7 @@ public class TestJobCounters {
     return len;
   }
 
-  @BeforeClass
+  @BeforeAll
   public static void initPaths() throws IOException {
     final Configuration conf = new Configuration();
     final Path TEST_ROOT_DIR =
@@ -207,7 +207,7 @@ public class TestJobCounters {
     createWordsFile(inFiles[2], conf);
   }
 
-  @AfterClass
+  @AfterAll
   public static void cleanup() throws IOException {
     //clean up the input and output files
     final Configuration conf = new Configuration();
@@ -528,7 +528,7 @@ public class TestJobCounters {
                     OutputCollector<WritableComparable, Writable> output,
                     Reporter reporter)
     throws IOException {
-      assertNotNull("Mapper not configured!", loader);
+      assertNotNull(loader, "Mapper not configured!");
       
       // load the memory
       loader.load();
@@ -557,7 +557,7 @@ public class TestJobCounters {
                        OutputCollector<WritableComparable, Writable> output,
                        Reporter reporter)
     throws IOException {
-      assertNotNull("Reducer not configured!", loader);
+      assertNotNull(loader, "Reducer not configured!");
       
       // load the memory
       loader.load();
@@ -581,11 +581,11 @@ public class TestJobCounters {
     } else if (TaskType.REDUCE.equals(type)) {
       reports = client.getReduceTaskReports(id);
     }
-    
-    assertNotNull("No reports found for task type '" + type.name() 
-                  + "' in job " + id, reports);
+
+    assertNotNull(reports, "No reports found for task type '" + type.name()
+        + "' in job " + id);
     // make sure that the total number of reports match the expected
-    assertEquals("Mismatch in task id", numReports, reports.length);
+    assertEquals(numReports, reports.length, "Mismatch in task id");
     
     Counters counters = reports[taskId].getCounters();
     
@@ -632,7 +632,7 @@ public class TestJobCounters {
     RunningJob job = client.submitJob(jobConf);
     job.waitForCompletion();
     JobID jobID = job.getID();
-    assertTrue("Job " + jobID + " failed!", job.isSuccessful());
+    assertTrue(job.isSuccessful(), "Job " + jobID + " failed!");
     
     return job;
   }
@@ -708,11 +708,11 @@ public class TestJobCounters {
       System.out.println("Job2 (high memory job) reduce task heap usage: " 
                          + highMemJobReduceHeapUsage);
 
-      assertTrue("Incorrect map heap usage reported by the map task", 
-                 lowMemJobMapHeapUsage < highMemJobMapHeapUsage);
+      assertTrue(lowMemJobMapHeapUsage < highMemJobMapHeapUsage,
+          "Incorrect map heap usage reported by the map task");
 
-      assertTrue("Incorrect reduce heap usage reported by the reduce task", 
-                 lowMemJobReduceHeapUsage < highMemJobReduceHeapUsage);
+      assertTrue(lowMemJobReduceHeapUsage < highMemJobReduceHeapUsage,
+          "Incorrect reduce heap usage reported by the reduce task");
     } finally {
       // shutdown the mr cluster
       mrCluster.shutdown();

+ 5 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java

@@ -30,14 +30,14 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
 public class TestJobName extends ClusterMapReduceTestCase {
 
-  @BeforeClass
+  @BeforeAll
   public static void setupClass() throws Exception {
     setupClassBase(TestJobName.class);
   }

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java

@@ -28,10 +28,10 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 26 - 26
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java

@@ -26,13 +26,13 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 public class TestKeyValueTextInputFormat {
   private static final Logger LOG =
@@ -102,14 +102,15 @@ public class TestKeyValueTextInputFormat {
           RecordReader<Text, Text> reader =
             format.getRecordReader(splits[j], job, reporter);
           Class readerClass = reader.getClass();
-          assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class, readerClass);        
+          assertEquals(KeyValueLineRecordReader.class, readerClass,
+              "reader class is KeyValueLineRecordReader.");
 
           Text key = reader.createKey();
           Class keyClass = key.getClass();
           Text value = reader.createValue();
           Class valueClass = value.getClass();
-          assertEquals("Key class is Text.", Text.class, keyClass);
-          assertEquals("Value class is Text.", Text.class, valueClass);
+          assertEquals(Text.class, keyClass, "Key class is Text.");
+          assertEquals(Text.class, valueClass, "Value class is Text.");
           try {
             int count = 0;
             while (reader.next(key, value)) {
@@ -120,7 +121,7 @@ public class TestKeyValueTextInputFormat {
                          " in split " + j +
                          " at position "+reader.getPos());
               }
-              assertFalse("Key in multiple partitions.", bits.get(v));
+              assertFalse(bits.get(v), "Key in multiple partitions.");
               bits.set(v);
               count++;
             }
@@ -129,7 +130,7 @@ public class TestKeyValueTextInputFormat {
             reader.close();
           }
         }
-        assertEquals("Some keys in no partition.", length, bits.cardinality());
+        assertEquals(length, bits.cardinality(), "Some keys in no partition.");
       }
 
     }
@@ -145,11 +146,12 @@ public class TestKeyValueTextInputFormat {
       in = makeStream("abcd\u20acbdcd\u20ac");
       Text line = new Text();
       in.readLine(line);
-      assertEquals("readLine changed utf8 characters",
-                   "abcd\u20acbdcd\u20ac", line.toString());
+      assertEquals("abcd\u20acbdcd\u20ac", line.toString(),
+          "readLine changed utf8 characters");
       in = makeStream("abc\u200axyz");
       in.readLine(line);
-      assertEquals("split on fake newline", "abc\u200axyz", line.toString());
+      assertEquals("abc\u200axyz", line.toString(),
+          "split on fake newline");
     } finally {
       if (in != null) {
         in.close();
@@ -163,18 +165,18 @@ public class TestKeyValueTextInputFormat {
       in = makeStream("a\nbb\n\nccc\rdddd\r\neeeee");
       Text out = new Text();
       in.readLine(out);
-      assertEquals("line1 length", 1, out.getLength());
+      assertEquals(1, out.getLength(), "line1 length");
       in.readLine(out);
-      assertEquals("line2 length", 2, out.getLength());
+      assertEquals(2, out.getLength(), "line2 length");
       in.readLine(out);
-      assertEquals("line3 length", 0, out.getLength());
+      assertEquals(0, out.getLength(), "line3 length");
       in.readLine(out);
-      assertEquals("line4 length", 3, out.getLength());
+      assertEquals(3, out.getLength(), "line4 length");
       in.readLine(out);
-      assertEquals("line5 length", 4, out.getLength());
+      assertEquals(4, out.getLength(), "line5 length");
       in.readLine(out);
-      assertEquals("line5 length", 5, out.getLength());
-      assertEquals("end of file", 0, in.readLine(out));
+      assertEquals(5, out.getLength(), "line5 length");
+      assertEquals(0, in.readLine(out), "end of file");
     } finally {
       if (in != null) {
         in.close();
@@ -236,21 +238,19 @@ public class TestKeyValueTextInputFormat {
     KeyValueTextInputFormat format = new KeyValueTextInputFormat();
     format.configure(job);
     InputSplit[] splits = format.getSplits(job, 100);
-    assertEquals("compressed splits == 2", 2, splits.length);
+    assertEquals(2, splits.length, "compressed splits == 2");
     FileSplit tmp = (FileSplit) splits[0];
     if (tmp.getPath().getName().equals("part2.txt.gz")) {
       splits[0] = splits[1];
       splits[1] = tmp;
     }
     List<Text> results = readSplit(format, splits[0], job);
-    assertEquals("splits[0] length", 6, results.size());
-    assertEquals("splits[0][5]", " dog", results.get(5).toString());
+    assertEquals(6, results.size(), "splits[0] length");
+    assertEquals(" dog", results.get(5).toString(), "splits[0][5]");
     results = readSplit(format, splits[1], job);
-    assertEquals("splits[1] length", 2, results.size());
-    assertEquals("splits[1][0]", "this is a test", 
-                 results.get(0).toString());    
-    assertEquals("splits[1][1]", "of gzip", 
-                 results.get(1).toString());    
+    assertEquals(2, results.size(), "splits[1] length");
+    assertEquals("this is a test", results.get(0).toString(), "splits[1][0]");
+    assertEquals("of gzip", results.get(1).toString(), "splits[1][1]");
   }
   
   public static void main(String[] args) throws Exception {
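
Most of the churn in this file is the assertion-signature flip: org.junit.Assert takes the failure message as the first parameter, while org.junit.jupiter.api.Assertions takes it as the last, optionally as a Supplier<String> so the message is only built on failure. A sketch of the two shapes (names and values are illustrative):

    import static org.junit.jupiter.api.Assertions.assertEquals;

    class MessageOrderExample {
      void check(int expected, int actual) {
        // JUnit 4: assertEquals("count mismatch", expected, actual);
        // JUnit 5: the message moves to the trailing position
        assertEquals(expected, actual, "count mismatch");
        // or lazily, avoiding string concatenation on the passing path
        assertEquals(expected, actual, () -> "count mismatch, got " + actual);
      }
    }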

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java

@@ -33,8 +33,8 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.LazyOutputFormat;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReaderJobs.java

@@ -17,7 +17,7 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -28,7 +28,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestLineRecordReaderJobs {
 

+ 17 - 19
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java

@@ -37,16 +37,16 @@ import org.apache.hadoop.mapreduce.util.MRJobConfUtil;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.ToolRunner;
 
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 /**
  * check for the job submission options of
@@ -58,22 +58,20 @@ public class TestLocalJobSubmission {
 
   private static File testRootDir;
 
-  @Rule
-  public TestName unitTestName = new TestName();
   private File unitTestDir;
   private Path jarPath;
   private Configuration config;
 
-  @BeforeClass
+  @BeforeAll
   public static void setupClass() throws Exception {
     // setup the test root directory
     testRootDir =
         GenericTestUtils.setupTestRootDir(TestLocalJobSubmission.class);
   }
 
-  @Before
-  public void setup() throws IOException {
-    unitTestDir = new File(testRootDir, unitTestName.getMethodName());
+  @BeforeEach
+  public void setup(TestInfo testInfo) throws IOException {
+    unitTestDir = new File(testRootDir, testInfo.getDisplayName());
     unitTestDir.mkdirs();
     config = createConfig();
     jarPath = makeJar(new Path(unitTestDir.getAbsolutePath(), "test.jar"));
@@ -120,7 +118,7 @@ public class TestLocalJobSubmission {
       LOG.error("Job failed with {}", e.getLocalizedMessage(), e);
       fail("Job failed");
     }
-    assertEquals("dist job res is not 0:", 0, res);
+    assertEquals(0, res, "dist job res is not 0:");
   }
 
   /**
@@ -140,13 +138,13 @@ public class TestLocalJobSubmission {
           (SpillCallBackPathsFinder) IntermediateEncryptedStream
               .setSpillCBInjector(new SpillCallBackPathsFinder());
       res = ToolRunner.run(config, new SleepJob(), args);
-      Assert.assertTrue("No spill occurred",
-          spillInjector.getEncryptedSpilledFiles().size() > 0);
+      assertTrue(spillInjector.getEncryptedSpilledFiles().size() > 0,
+          "No spill occurred");
     } catch (Exception e) {
       LOG.error("Job failed with {}", e.getLocalizedMessage(), e);
       fail("Job failed");
     }
-    assertEquals("dist job res is not 0:", 0, res);
+    assertEquals(0, res, "dist job res is not 0:");
   }
 
   /**
@@ -188,7 +186,7 @@ public class TestLocalJobSubmission {
       LOG.error("Job failed with {}", e.getLocalizedMessage(), e);
       fail("Job failed");
     }
-    assertEquals("dist job res is not 0:", 0, res);
+    assertEquals(0, res, "dist job res is not 0:");
   }
 
   /**
@@ -209,7 +207,7 @@ public class TestLocalJobSubmission {
       LOG.error("Job failed with {}" + e.getLocalizedMessage(), e);
       fail("Job failed");
     }
-    assertEquals("dist job res is not 0:", 0, res);
+    assertEquals(0, res, "dist job res is not 0:");
   }
 
   private Path makeJar(Path p) throws IOException {
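
This file also demonstrates the rule migration: Jupiter drops @Rule, so the TestName rule is replaced by injecting TestInfo into the @BeforeEach method. One caveat worth knowing: getDisplayName() defaults to the method name plus a parameter list in parentheses (e.g. "myTest()"), not the bare name that TestName.getMethodName() returned, so per-test directory names can change shape. A minimal sketch under that assumption:

    import java.io.File;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.TestInfo;

    class TestInfoExample {
      private File unitTestDir;

      @BeforeEach
      void setup(TestInfo testInfo) {
        // JUnit 4 equivalent: @Rule TestName name; name.getMethodName()
        unitTestDir = new File(System.getProperty("java.io.tmpdir"),
            testInfo.getDisplayName());
        unitTestDir.mkdirs();
      }

      @Test
      void myTest() {
        // unitTestDir exists here
      }
    }

Where the bare method name matters, testInfo.getTestMethod().map(java.lang.reflect.Method::getName) recovers it.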

+ 11 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java

@@ -26,16 +26,16 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.concurrent.TimeoutException;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -94,8 +94,8 @@ public class TestMRCJCFileInputFormat {
                   blockLocs[0].equals(splitLocs[1])));
     }
 
-    assertEquals("Expected value of " + FileInputFormat.NUM_INPUT_FILES,
-                 1, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0));
+    assertEquals(1, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0),
+        "Expected value of " + FileInputFormat.NUM_INPUT_FILES);
   }
 
   private void createInputs(FileSystem fs, Path inDir, String fileName)
@@ -135,8 +135,8 @@ public class TestMRCJCFileInputFormat {
     inFormat.configure(job);
     InputSplit[] splits = inFormat.getSplits(job, 1);
 
-    assertEquals("Expected value of " + FileInputFormat.NUM_INPUT_FILES,
-                 numFiles, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0));
+    assertEquals(numFiles, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0),
+        "Expected value of " + FileInputFormat.NUM_INPUT_FILES);
   }
   
   final Path root = new Path("/TestFileInputFormat");
@@ -191,8 +191,8 @@ public class TestMRCJCFileInputFormat {
     } catch (Exception e) {
       exceptionThrown = true;
     }
-    assertTrue("Exception should be thrown by default for scanning a "
-        + "directory with directories inside.", exceptionThrown);
+    assertTrue(exceptionThrown, "Exception should be thrown by default for scanning a "
+        + "directory with directories inside.");
 
     // Enable multi-level/recursive inputs
     job.setBoolean(FileInputFormat.INPUT_DIR_RECURSIVE, true);
@@ -314,7 +314,7 @@ public class TestMRCJCFileInputFormat {
     DFSTestUtil.waitReplication(fileSys, name, replication);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     if (dfs != null) {
       dfs.shutdown();

+ 13 - 13
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java

@@ -26,18 +26,18 @@ import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestMRCJCFileOutputCommitter {
   private static Path outDir = new Path(GenericTestUtils.getTempPath("output"));
@@ -145,14 +145,14 @@ public class TestMRCJCFileOutputCommitter {
     committer.abortTask(tContext);
     File expectedFile = new File(new Path(committer
         .getTaskAttemptPath(tContext), file).toString());
-    assertFalse("task temp dir still exists", expectedFile.exists());
+    assertFalse(expectedFile.exists(), "task temp dir still exists");
 
     committer.abortJob(jContext, JobStatus.State.FAILED);
     expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
         .toString());
-    assertFalse("job temp dir "+expectedFile+" still exists", expectedFile.exists());
-    assertEquals("Output directory not empty", 0, new File(outDir.toString())
-        .listFiles().length);
+    assertFalse(expectedFile.exists(), "job temp dir "+expectedFile+" still exists");
+    assertEquals(0, new File(outDir.toString())
+        .listFiles().length, "Output directory not empty");
   }
 
   public static class FakeFileSystem extends RawLocalFileSystem {
@@ -210,7 +210,7 @@ public class TestMRCJCFileOutputCommitter {
     assertNotNull(th);
     assertTrue(th instanceof IOException);
     assertTrue(th.getMessage().contains("fake delete failed"));
-    assertTrue(expectedFile + " does not exists", expectedFile.exists());
+    assertTrue(expectedFile.exists(), expectedFile + " does not exists");
 
     th = null;
     try {
@@ -221,10 +221,10 @@ public class TestMRCJCFileOutputCommitter {
     assertNotNull(th);
     assertTrue(th instanceof IOException);
     assertTrue(th.getMessage().contains("fake delete failed"));
-    assertTrue("job temp dir does not exists", jobTmpDir.exists());
+    assertTrue(jobTmpDir.exists(), "job temp dir does not exists");
   }
 
-  @After
+  @AfterEach
   public void teardown() {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }

+ 5 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java

@@ -30,12 +30,13 @@ import org.apache.hadoop.mapreduce.TestMRJobClient;
 import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.util.Tool;
 
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-@Ignore
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+
+@Disabled
 public class TestMRCJCJobClient extends TestMRJobClient {
 
-  @BeforeClass
+  @BeforeAll
   public static void setupClass() throws Exception {
     setupClassBase(TestMRCJCJobClient.class);
   }
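
@Ignore maps one-for-one to Jupiter's @Disabled, usable at class or method level; unlike this patch, which keeps the bare annotation, a reason string can be attached. A small hypothetical sketch:

    import org.junit.jupiter.api.Disabled;
    import org.junit.jupiter.api.Test;

    @Disabled("entire class skipped until the underlying issue is resolved")
    class DisabledExample {

      @Test
      @Disabled("a single method can be skipped the same way")
      void skippedTest() {
      }
    }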

+ 6 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 import java.io.File;
 import java.net.URLClassLoader;
 import java.net.URL;
@@ -29,9 +29,11 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.util.ClassUtil;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import static org.junit.Assert.*;
-@Ignore
+@Disabled
 public class TestMRCJCJobConf {
   private static final String JAR_RELATIVE_PATH =
     "build/test/mapred/testjar/testjob.jar";

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java

@@ -28,14 +28,14 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.util.MRJobConfUtil;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * Simple MapReduce to test ability of the MRAppMaster to request and use

+ 66 - 71
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java

@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -56,8 +58,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineR
 import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -85,8 +86,8 @@ public class TestMRTimelineEventHandling {
       cluster.start();
 
       //verify that the timeline service is not started.
-      Assert.assertNull("Timeline Service should not have been started",
-          cluster.getApplicationHistoryServer());
+      assertNull(cluster.getApplicationHistoryServer(),
+          "Timeline Service should not have been started");
     }
     finally {
       if(cluster != null) {
@@ -103,8 +104,8 @@ public class TestMRTimelineEventHandling {
       cluster.start();
 
       //verify that the timeline service is not started.
-      Assert.assertNull("Timeline Service should not have been started",
-          cluster.getApplicationHistoryServer());
+      assertNull(cluster.getApplicationHistoryServer(),
+          "Timeline Service should not have been started");
     }
     finally {
       if(cluster != null) {
@@ -135,33 +136,31 @@ public class TestMRTimelineEventHandling {
       Path outDir = new Path(localPathRoot, "output");
       RunningJob job =
               UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
-      Assert.assertEquals(JobStatus.SUCCEEDED,
-              job.getJobStatus().getState().getValue());
+      assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
       TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
-              null, null, null, null, null, null, null);
-      Assert.assertEquals(1, entities.getEntities().size());
+          null, null, null, null, null, null, null);
+      assertEquals(1, entities.getEntities().size());
       TimelineEntity tEntity = entities.getEntities().get(0);
-      Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
-      Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
-      Assert.assertEquals(EventType.AM_STARTED.toString(),
-              tEntity.getEvents().get(tEntity.getEvents().size() - 1)
-              .getEventType());
-      Assert.assertEquals(EventType.JOB_FINISHED.toString(),
-              tEntity.getEvents().get(0).getEventType());
+      assertEquals(job.getID().toString(), tEntity.getEntityId());
+      assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
+      assertEquals(EventType.AM_STARTED.toString(),
+          tEntity.getEvents().get(tEntity.getEvents().size() - 1)
+          .getEventType());
+      assertEquals(EventType.JOB_FINISHED.toString(),
+          tEntity.getEvents().get(0).getEventType());
 
       job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
-      Assert.assertEquals(JobStatus.FAILED,
-              job.getJobStatus().getState().getValue());
+      assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue());
       entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
               null, null, null, null);
-      Assert.assertEquals(2, entities.getEntities().size());
+      assertEquals(2, entities.getEntities().size());
       tEntity = entities.getEntities().get(0);
-      Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
-      Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
-      Assert.assertEquals(EventType.AM_STARTED.toString(),
-              tEntity.getEvents().get(tEntity.getEvents().size() - 1)
-              .getEventType());
-      Assert.assertEquals(EventType.JOB_FAILED.toString(),
+      assertEquals(job.getID().toString(), tEntity.getEntityId());
+      assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
+      assertEquals(EventType.AM_STARTED.toString(),
+          tEntity.getEvents().get(tEntity.getEvents().size() - 1)
+          .getEventType());
+      assertEquals(EventType.JOB_FAILED.toString(),
               tEntity.getEvents().get(0).getEventType());
     } finally {
       if (cluster != null) {
@@ -221,7 +220,7 @@ public class TestMRTimelineEventHandling {
           UtilsForTests.createConfigValue(101 * 1024));
       RunningJob job =
           UtilsForTests.runJobSucceed(successConf, inDir, outDir);
-      Assert.assertEquals(JobStatus.SUCCEEDED,
+      assertEquals(JobStatus.SUCCEEDED,
           job.getJobStatus().getState().getValue());
 
       YarnClient yarnClient = YarnClient.createYarnClient();
@@ -232,7 +231,7 @@ public class TestMRTimelineEventHandling {
 
       ApplicationId firstAppId = null;
       List<ApplicationReport> apps = yarnClient.getApplications(appStates);
-      Assert.assertEquals(apps.size(), 1);
+      assertEquals(apps.size(), 1);
       ApplicationReport appReport = apps.get(0);
       firstAppId = appReport.getApplicationId();
       UtilsForTests.waitForAppFinished(job, cluster);
@@ -240,11 +239,11 @@ public class TestMRTimelineEventHandling {
 
       LOG.info("Run 2nd job which should be failed.");
       job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
-      Assert.assertEquals(JobStatus.FAILED,
+      assertEquals(JobStatus.FAILED,
           job.getJobStatus().getState().getValue());
 
       apps = yarnClient.getApplications(appStates);
-      Assert.assertEquals(apps.size(), 2);
+      assertEquals(apps.size(), 2);
 
       appReport = apps.get(0).getApplicationId().equals(firstAppId) ?
           apps.get(0) : apps.get(1);
@@ -270,7 +269,7 @@ public class TestMRTimelineEventHandling {
 
     File tmpRootFolder = new File(tmpRoot);
 
-    Assert.assertTrue(tmpRootFolder.isDirectory());
+    assertTrue(tmpRootFolder.isDirectory());
     String basePath = tmpRoot + YarnConfiguration.DEFAULT_RM_CLUSTER_ID +
         File.separator +
         UserGroupInformation.getCurrentUser().getShortUserName() +
@@ -283,9 +282,8 @@ public class TestMRTimelineEventHandling {
         basePath + File.separator + "MAPREDUCE_JOB" + File.separator;
 
     File entityFolder = new File(outputDirJob);
-    Assert.assertTrue("Job output directory: " + outputDirJob +
-        " does not exist.",
-        entityFolder.isDirectory());
+    assertTrue(entityFolder.isDirectory(),
+        "Job output directory: " + outputDirJob + " does not exist.");
 
     // check for job event file
     String jobEventFileName = appId.toString().replaceAll("application", "job")
@@ -293,9 +291,8 @@ public class TestMRTimelineEventHandling {
 
     String jobEventFilePath = outputDirJob + jobEventFileName;
     File jobEventFile = new File(jobEventFilePath);
-    Assert.assertTrue("jobEventFilePath: " + jobEventFilePath +
-        " does not exist.",
-        jobEventFile.exists());
+    assertTrue(jobEventFile.exists(),
+        "jobEventFilePath: " + jobEventFilePath + " does not exist.");
     verifyEntity(jobEventFile, EventType.JOB_FINISHED.name(),
         true, false, null, false);
     Set<String> cfgsToCheck = Sets.newHashSet("dummy_conf1", "dummy_conf2",
@@ -306,10 +303,8 @@ public class TestMRTimelineEventHandling {
     String outputAppDir =
         basePath + File.separator + "YARN_APPLICATION" + File.separator;
     entityFolder = new File(outputAppDir);
-    Assert.assertTrue(
-        "Job output directory: " + outputAppDir +
-        " does not exist.",
-        entityFolder.isDirectory());
+    assertTrue(entityFolder.isDirectory(),
+        "Job output directory: " + outputAppDir + " does not exist.");
 
     // check for job event file
     String appEventFileName = appId.toString()
@@ -317,10 +312,9 @@ public class TestMRTimelineEventHandling {
 
     String appEventFilePath = outputAppDir + appEventFileName;
     File appEventFile = new File(appEventFilePath);
-    Assert.assertTrue(
+    assertTrue(appEventFile.exists(),
         "appEventFilePath: " + appEventFilePath +
-        " does not exist.",
-        appEventFile.exists());
+        " does not exist.");
     verifyEntity(appEventFile, null, true, false, null, false);
     verifyEntity(appEventFile, null, false, true, cfgsToCheck, false);
 
@@ -328,9 +322,9 @@ public class TestMRTimelineEventHandling {
     String outputDirTask =
         basePath + File.separator + "MAPREDUCE_TASK" + File.separator;
     File taskFolder = new File(outputDirTask);
-    Assert.assertTrue("Task output directory: " + outputDirTask +
-        " does not exist.",
-        taskFolder.isDirectory());
+    assertTrue(taskFolder.isDirectory(),
+        "Task output directory: " + outputDirTask +
+        " does not exist.");
 
     String taskEventFileName =
         appId.toString().replaceAll("application", "task") +
@@ -339,9 +333,8 @@ public class TestMRTimelineEventHandling {
 
     String taskEventFilePath = outputDirTask + taskEventFileName;
     File taskEventFile = new File(taskEventFilePath);
-    Assert.assertTrue("taskEventFileName: " + taskEventFilePath +
-        " does not exist.",
-        taskEventFile.exists());
+    assertTrue(taskEventFile.exists(),
+        "taskEventFileName: " + taskEventFilePath + " does not exist.");
     verifyEntity(taskEventFile, EventType.TASK_FINISHED.name(),
         true, false, null, true);
 
@@ -349,8 +342,9 @@ public class TestMRTimelineEventHandling {
     String outputDirTaskAttempt =
         basePath + File.separator + "MAPREDUCE_TASK_ATTEMPT" + File.separator;
     File taskAttemptFolder = new File(outputDirTaskAttempt);
-    Assert.assertTrue("TaskAttempt output directory: " + outputDirTaskAttempt +
-        " does not exist.", taskAttemptFolder.isDirectory());
+    assertTrue(taskAttemptFolder.isDirectory(),
+        "TaskAttempt output directory: " + outputDirTaskAttempt +
+        " does not exist.");
 
     String taskAttemptEventFileName = appId.toString().replaceAll(
         "application", "attempt") + "_m_000000_0" +
@@ -359,8 +353,9 @@ public class TestMRTimelineEventHandling {
     String taskAttemptEventFilePath = outputDirTaskAttempt +
         taskAttemptEventFileName;
     File taskAttemptEventFile = new File(taskAttemptEventFilePath);
-    Assert.assertTrue("taskAttemptEventFileName: " + taskAttemptEventFilePath +
-        " does not exist.", taskAttemptEventFile.exists());
+    assertTrue(taskAttemptEventFile.exists(),
+        "taskAttemptEventFileName: " + taskAttemptEventFilePath +
+        " does not exist.");
     verifyEntity(taskAttemptEventFile, EventType.MAP_ATTEMPT_FINISHED.name(),
         true, false, null, true);
   }
@@ -397,14 +392,14 @@ public class TestMRTimelineEventHandling {
 
           LOG.info("strLine.trim()= " + strLine.trim());
           if (checkIdPrefix) {
-            Assert.assertTrue("Entity ID prefix expected to be > 0",
-                entity.getIdPrefix() > 0);
+            assertTrue(entity.getIdPrefix() > 0,
+                "Entity ID prefix expected to be > 0");
             if (idPrefix == -1) {
               idPrefix = entity.getIdPrefix();
             } else {
-              Assert.assertEquals("Entity ID prefix should be same across " +
-                  "each publish of same entity",
-                      idPrefix, entity.getIdPrefix());
+              assertEquals(idPrefix, entity.getIdPrefix(),
+                  "Entity ID prefix should be same across " +
+                  "each publish of same entity");
             }
           }
           if (eventId == null) {
@@ -492,21 +487,21 @@ public class TestMRTimelineEventHandling {
 
       RunningJob job =
           UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
-      Assert.assertEquals(JobStatus.SUCCEEDED,
+      assertEquals(JobStatus.SUCCEEDED,
           job.getJobStatus().getState().getValue());
       TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
           null, null, null, null, null, null, null);
-      Assert.assertEquals(0, entities.getEntities().size());
+      assertEquals(0, entities.getEntities().size());
 
       conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
       job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
-      Assert.assertEquals(JobStatus.SUCCEEDED,
+      assertEquals(JobStatus.SUCCEEDED,
           job.getJobStatus().getState().getValue());
       entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
           null, null, null, null);
-      Assert.assertEquals(1, entities.getEntities().size());
+      assertEquals(1, entities.getEntities().size());
       TimelineEntity tEntity = entities.getEntities().get(0);
-      Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
+      assertEquals(job.getID().toString(), tEntity.getEntityId());
     } finally {
       if (cluster != null) {
         cluster.stop();
@@ -532,21 +527,21 @@ public class TestMRTimelineEventHandling {
       conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
       RunningJob job =
           UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
-      Assert.assertEquals(JobStatus.SUCCEEDED,
+      assertEquals(JobStatus.SUCCEEDED,
           job.getJobStatus().getState().getValue());
       TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
           null, null, null, null, null, null, null);
-      Assert.assertEquals(0, entities.getEntities().size());
+      assertEquals(0, entities.getEntities().size());
 
       conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
       job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
-      Assert.assertEquals(JobStatus.SUCCEEDED,
+      assertEquals(JobStatus.SUCCEEDED,
           job.getJobStatus().getState().getValue());
       entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
           null, null, null, null);
-      Assert.assertEquals(1, entities.getEntities().size());
+      assertEquals(1, entities.getEntities().size());
       TimelineEntity tEntity = entities.getEntities().get(0);
-      Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
+      assertEquals(job.getID().toString(), tEntity.getEntityId());
     } finally {
       if (cluster != null) {
         cluster.stop();
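
The dominant change in this file is dropping the org.junit.Assert class prefix in favor of static imports from org.junit.jupiter.api.Assertions, the Jupiter convention. The mapping is mechanical; a sketch with illustrative names:

    // JUnit 4 style: qualified calls through the Assert class
    //   import org.junit.Assert;
    //   Assert.assertEquals(1, entities.size());

    // JUnit 5 style: static imports and unqualified calls
    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertNull;

    class AssertionsImportExample {
      void verify(Object historyServer, int entityCount) {
        assertNull(historyServer, "Timeline Service should not have been started");
        assertEquals(1, entityCount);
      }
    }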

+ 6 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapOutputType.java

@@ -31,11 +31,11 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.MRConfig;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
 
 
 /** 
@@ -90,7 +90,7 @@ public class TestMapOutputType {
     }
   }
 
-  @Before
+  @BeforeEach
   public void configure() throws Exception {
     Path testdir = new Path(TEST_DIR.getAbsolutePath());
     Path inDir = new Path(testdir, "in");
@@ -124,7 +124,7 @@ public class TestMapOutputType {
     jc = new JobClient(conf);
   }
 
-  @After
+  @AfterEach
   public void cleanup() {
     FileUtil.fullyDelete(TEST_DIR);
   }
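
The per-test lifecycle annotations rename one-for-one as well: @Before becomes @BeforeEach and @After becomes @AfterEach, with unchanged run-around-every-test semantics. A minimal sketch mirroring the configure/cleanup pair above (the directory is illustrative):

    import java.io.File;
    import org.apache.hadoop.fs.FileUtil;
    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    class PerTestLifecycleExample {
      private static final File TEST_DIR = new File("target", "per-test-example");

      @BeforeEach
      void configure() {
        TEST_DIR.mkdirs();               // fresh fixture before every @Test
      }

      @AfterEach
      void cleanup() {
        FileUtil.fullyDelete(TEST_DIR);  // torn down after every @Test
      }

      @Test
      void usesFixture() {
        // TEST_DIR exists here
      }
    }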

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java

@@ -36,11 +36,11 @@ import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.split.JobSplitWriter;
 import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *  Validates map phase progress.
@@ -193,8 +193,8 @@ public class TestMapProgress {
         return;
       }
       // validate map task progress when the map task is in map phase
-      assertTrue("Map progress is not the expected value.",
-                 Math.abs(mapTaskProgress - ((float)recordNum/3)) < 0.001);
+      assertTrue(Math.abs(mapTaskProgress - ((float)recordNum/3)) < 0.001,
+          "Map progress is not the expected value.");
     }
   }
 

+ 17 - 19
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java

@@ -48,11 +48,11 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**********************************************************
  * MapredLoadTest generates a bunch of work that exercises
@@ -254,7 +254,7 @@ public class TestMapRed extends Configured implements Tool {
   private static int counts = 100;
   private static Random r = new Random();
 
-  @After
+  @AfterEach
   public void cleanup() {
     FileUtil.fullyDelete(TEST_DIR);
   }
@@ -309,12 +309,11 @@ public class TestMapRed extends Configured implements Tool {
         mapOutputFile.setConf(conf);
         Path input = mapOutputFile.getInputFile(0);
         FileSystem fs = FileSystem.get(conf);
-        assertTrue("reduce input exists " + input, fs.exists(input));
+        assertTrue(fs.exists(input), "reduce input exists " + input);
         SequenceFile.Reader rdr = 
           new SequenceFile.Reader(fs, input, conf);
-        assertEquals("is reduce input compressed " + input, 
-                     compressInput, 
-                     rdr.isCompressed());
+        assertEquals(compressInput, rdr.isCompressed(),
+            "is reduce input compressed " + input);
         rdr.close();          
       }
     }
@@ -372,10 +371,10 @@ public class TestMapRed extends Configured implements Tool {
         new Path(testdir, "nullout/part-00000"), conf);
     m = "AAAAAAAAAAAAAA";
     for (int i = 1; r.next(NullWritable.get(), t); ++i) {
-      assertTrue("Unexpected value: " + t, values.remove(t.toString()));
+      assertTrue(values.remove(t.toString()), "Unexpected value: " + t);
       m = m.replace((char)('A' + i - 1), (char)('A' + i));
     }
-    assertTrue("Missing values: " + values.toString(), values.isEmpty());
+    assertTrue(values.isEmpty(), "Missing values: " + values.toString());
   }
 
   private void checkCompression(boolean compressMapOutputs,
@@ -415,16 +414,15 @@ public class TestMapRed extends Configured implements Tool {
       f.writeBytes("Is this done, yet?\n");
       f.close();
       RunningJob rj = JobClient.runJob(conf);
-      assertTrue("job was complete", rj.isComplete());
-      assertTrue("job was successful", rj.isSuccessful());
+      assertTrue(rj.isComplete(), "job was complete");
+      assertTrue(rj.isSuccessful(), "job was successful");
       Path output = new Path(outDir,
                              Task.getOutputName(0));
-      assertTrue("reduce output exists " + output, fs.exists(output));
+      assertTrue(fs.exists(output), "reduce output exists " + output);
       SequenceFile.Reader rdr = 
         new SequenceFile.Reader(fs, output, conf);
-      assertEquals("is reduce output compressed " + output, 
-                   redCompression != CompressionType.NONE, 
-                   rdr.isCompressed());
+      assertEquals(redCompression != CompressionType.NONE,
+          rdr.isCompressed(), "is reduce output compressed " + output);
       rdr.close();
     } finally {
       fs.delete(testdir, true);
@@ -663,7 +661,7 @@ public class TestMapRed extends Configured implements Tool {
     } finally {
       bw.close();
     }
-    assertTrue("testMapRed failed", success);
+    assertTrue(success, "testMapRed failed");
     fs.delete(testdir, true);
   }
 
@@ -778,7 +776,7 @@ public class TestMapRed extends Configured implements Tool {
 
       JobClient.runJob(conf);
     } catch (Exception e) {
-      assertTrue("Threw exception:" + e,false);
+      assertTrue(false, "Threw exception:" + e);
     }
   }
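
The last hunk keeps the assertTrue(false, ...) idiom for flagging an unexpected exception, only fixing the argument order. Jupiter offers two more direct forms, sketched here as a hedged alternative rather than what the patch does: fail(message, cause), which preserves the stack trace, and assertDoesNotThrow around the call:

    import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
    import static org.junit.jupiter.api.Assertions.fail;

    class UnexpectedExceptionExample {
      void runJob() throws Exception {
        // stand-in for JobClient.runJob(conf)
      }

      void viaFail() {
        try {
          runJob();
        } catch (Exception e) {
          fail("Threw exception: " + e, e);  // cause keeps the stack trace
        }
      }

      void viaAssertDoesNotThrow() {
        assertDoesNotThrow(this::runJob);   // fails with the thrown cause
      }
    }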
 

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java

@@ -44,8 +44,8 @@ import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 
 import org.apache.hadoop.mapred.Task.TaskReporter;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 @SuppressWarnings(value={"unchecked", "deprecation"})
 /**

+ 4 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java

@@ -20,12 +20,13 @@ package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 /**
  * A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster.
  */
@@ -50,7 +51,7 @@ public class TestMiniMRBringup {
       mr = new MiniMRYarnCluster("testMiniMRYarnClusterWithoutJHS");
       mr.init(conf);
       mr.start();
-      Assert.assertEquals(null, mr.getHistoryServer());
+      assertEquals(null, mr.getHistoryServer());
     } finally {
       if (mr != null) {
         mr.stop();

+ 20 - 23
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java

@@ -18,10 +18,10 @@
 package org.apache.hadoop.mapred;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.DataOutputStream;
 import java.io.File;
@@ -42,9 +42,9 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.util.Shell;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -170,19 +170,18 @@ public class TestMiniMRChildTask {
       boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false);
       if (oldConfigs) {
         String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", 
-                      javaOpts);
+        assertNotNull(javaOpts,
+            JobConf.MAPRED_TASK_JAVA_OPTS + " is null!");
         assertThat(javaOpts)
             .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: "
                 + javaOpts)
             .isEqualTo(TASK_OPTS_VAL);
       } else {
         String mapJavaOpts = job.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!", 
-                      mapJavaOpts);
-        assertEquals(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + 
-                     mapJavaOpts, 
-                     mapJavaOpts, MAP_OPTS_VAL);
+        assertNotNull(mapJavaOpts,
+            JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!");
+        assertEquals(mapJavaOpts, MAP_OPTS_VAL,
+            JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + mapJavaOpts);
       }
 
       // check if X=y works for an already existing parameter
@@ -193,8 +192,7 @@ public class TestMiniMRChildTask {
       checkEnv("NEW_PATH", File.pathSeparator + "/tmp", "noappend");
 
       String jobLocalDir = job.get(MRJobConfig.JOB_LOCAL_DIR);
-      assertNotNull(MRJobConfig.JOB_LOCAL_DIR + " is null",
-                    jobLocalDir);
+      assertNotNull(jobLocalDir, MRJobConfig.JOB_LOCAL_DIR + " is null");
     }
 
     public void map(WritableComparable key, Writable value,
@@ -214,16 +212,15 @@ public class TestMiniMRChildTask {
       boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false);
       if (oldConfigs) {
         String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", 
-                      javaOpts);
+        assertNotNull(javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!");
         assertThat(javaOpts)
             .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: "
                 + javaOpts)
             .isEqualTo(TASK_OPTS_VAL);
       } else {
         String reduceJavaOpts = job.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!", 
-                      reduceJavaOpts);
+        assertNotNull(reduceJavaOpts,
+            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!");
         assertThat(reduceJavaOpts)
             .withFailMessage(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS +
                 " has value of: " + reduceJavaOpts)
@@ -247,7 +244,7 @@ public class TestMiniMRChildTask {
     
   }
   
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws IOException {
     // create configuration, dfs, file system and mapred cluster 
     dfs = new MiniDFSCluster.Builder(conf).build();
@@ -272,7 +269,7 @@ public class TestMiniMRChildTask {
     localFs.setPermission(APP_JAR, new FsPermission("700"));
   }
 
-  @AfterClass
+  @AfterAll
   public static void tearDown() {
     // close file system and shut down dfs and mapred cluster
     try {
@@ -378,7 +375,7 @@ public class TestMiniMRChildTask {
     job.setMaxMapAttempts(1); // speed up failures
     job.waitForCompletion(true);
     boolean succeeded = job.waitForCompletion(true);
-    assertTrue("The environment checker job failed.", succeeded);
+    assertTrue(succeeded, "The environment checker job failed.");
   }
   
 }

+ 5 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java

@@ -32,8 +32,9 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * A JUnit test to test Mini Map-Reduce Cluster with multiple directories
@@ -175,7 +176,7 @@ public class TestMiniMRClasspath {
       String result;
       result = launchWordCount(fileSys.getUri(), jobConf,
           "The quick brown fox\nhas many silly\n" + "red fox sox\n", 3, 1);
-      Assert.assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n"
+      assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n"
           + "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result);
           
     } finally {
@@ -208,7 +209,7 @@ public class TestMiniMRClasspath {
       
       result = launchExternal(fileSys.getUri(), jobConf,
           "Dennis was here!\nDennis again!", 3, 1);
-      Assert.assertEquals("Dennis again!\t1\nDennis was here!\t1\n", result);
+      assertEquals("Dennis again!\t1\nDennis was here!\t1\n", result);
       
     } 
     finally {

+ 35 - 36
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.IOException;
 import java.util.StringTokenizer;
@@ -34,9 +34,9 @@ import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * Basic testing for the MiniMRClientCluster. This test shows an example class
@@ -54,7 +54,7 @@ public class TestMiniMRClientCluster {
   private class InternalClass {
   }
 
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws IOException {
     final Configuration conf = new Configuration();
     final Path TEST_ROOT_DIR = new Path(System.getProperty("test.build.data",
@@ -81,7 +81,7 @@ public class TestMiniMRClientCluster {
         InternalClass.class, 1, new Configuration());
   }
 
-  @AfterClass
+  @AfterAll
   public static void cleanup() throws IOException {
     // clean up the input and output files
     final Configuration conf = new Configuration();
@@ -128,27 +128,26 @@ public class TestMiniMRClientCluster {
     String mrHistWebAppAddress2 = mrCluster.getConfig().get(
         JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS);
 
-    assertEquals("Address before restart: " + rmAddress1
-        + " is different from new address: " + rmAddress2, rmAddress1,
-        rmAddress2);
-    assertEquals("Address before restart: " + rmAdminAddress1
-        + " is different from new address: " + rmAdminAddress2,
-        rmAdminAddress1, rmAdminAddress2);
-    assertEquals("Address before restart: " + rmSchedAddress1
-        + " is different from new address: " + rmSchedAddress2,
-        rmSchedAddress1, rmSchedAddress2);
-    assertEquals("Address before restart: " + rmRstrackerAddress1
-        + " is different from new address: " + rmRstrackerAddress2,
-        rmRstrackerAddress1, rmRstrackerAddress2);
-    assertEquals("Address before restart: " + rmWebAppAddress1
-        + " is different from new address: " + rmWebAppAddress2,
-        rmWebAppAddress1, rmWebAppAddress2);
-    assertEquals("Address before restart: " + mrHistAddress1
-        + " is different from new address: " + mrHistAddress2, mrHistAddress1,
-        mrHistAddress2);
-    assertEquals("Address before restart: " + mrHistWebAppAddress1
-        + " is different from new address: " + mrHistWebAppAddress2,
-        mrHistWebAppAddress1, mrHistWebAppAddress2);
+    assertEquals(rmAddress1, rmAddress2, "Address before restart: " + rmAddress1
+        + " is different from new address: " + rmAddress2);
+    assertEquals(rmAdminAddress1, rmAdminAddress2,
+        "Address before restart: " + rmAdminAddress1
+        + " is different from new address: " + rmAdminAddress2);
+    assertEquals(rmSchedAddress1, rmSchedAddress2,
+        "Address before restart: " + rmSchedAddress1
+        + " is different from new address: " + rmSchedAddress2);
+    assertEquals(rmRstrackerAddress1, rmRstrackerAddress2,
+        "Address before restart: " + rmRstrackerAddress1
+        + " is different from new address: " + rmRstrackerAddress2);
+    assertEquals(rmWebAppAddress1, rmWebAppAddress2,
+        "Address before restart: " + rmWebAppAddress1
+        + " is different from new address: " + rmWebAppAddress2);
+    assertEquals(mrHistAddress1, mrHistAddress2,
+        "Address before restart: " + mrHistAddress1
+        + " is different from new address: " + mrHistAddress2);
+    assertEquals(mrHistWebAppAddress1, mrHistWebAppAddress2,
+        "Address before restart: " + mrHistWebAppAddress1
+        + " is different from new address: " + mrHistWebAppAddress2);
 
   }
 
@@ -165,14 +164,14 @@ public class TestMiniMRClientCluster {
 
   private void validateCounters(Counters counters, long mapInputRecords,
       long mapOutputRecords, long reduceInputGroups, long reduceOutputRecords) {
-    assertEquals("MapInputRecords", mapInputRecords, counters.findCounter(
-        "MyCounterGroup", "MAP_INPUT_RECORDS").getValue());
-    assertEquals("MapOutputRecords", mapOutputRecords, counters.findCounter(
-        "MyCounterGroup", "MAP_OUTPUT_RECORDS").getValue());
-    assertEquals("ReduceInputGroups", reduceInputGroups, counters.findCounter(
-        "MyCounterGroup", "REDUCE_INPUT_GROUPS").getValue());
-    assertEquals("ReduceOutputRecords", reduceOutputRecords, counters
-        .findCounter("MyCounterGroup", "REDUCE_OUTPUT_RECORDS").getValue());
+    assertEquals(mapInputRecords, counters.findCounter("MyCounterGroup",
+        "MAP_INPUT_RECORDS").getValue(), "MapInputRecords");
+    assertEquals(mapOutputRecords, counters.findCounter("MyCounterGroup",
+        "MAP_OUTPUT_RECORDS").getValue(), "MapOutputRecords");
+    assertEquals(reduceInputGroups, counters.findCounter("MyCounterGroup",
+        "REDUCE_INPUT_GROUPS").getValue(), "ReduceInputGroups");
+    assertEquals(reduceOutputRecords, counters.findCounter("MyCounterGroup",
+        "REDUCE_OUTPUT_RECORDS").getValue(), "ReduceOutputRecords");
   }
 
   private static void createFile(Path inFile, Configuration conf)

+ 6 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java

@@ -21,9 +21,9 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.MRCaching.TestResult;
-import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.IOException;
 
@@ -31,7 +31,7 @@ import java.io.IOException;
  * A JUnit test to test caching with DFS
  * 
  */
-@Ignore
+@Disabled
 public class TestMiniMRDFSCaching {
 
   @Test
@@ -52,7 +52,7 @@ public class TestMiniMRDFSCaching {
                                             mr.createJobConf(),
                                             "The quick brown fox\nhas many silly\n"
                                             + "red fox sox\n");
-      assertTrue("Archives not matching", ret.isOutputOk);
+      assertTrue(ret.isOutputOk, "Archives not matching");
       // launch MR cache with symlinks
       ret = MRCaching.launchMRCache("/testing/wc/input",
                                     "/testing/wc/output",
@@ -60,7 +60,7 @@ public class TestMiniMRDFSCaching {
                                     mr.createJobConf(),
                                     "The quick brown fox\nhas many silly\n"
                                     + "red fox sox\n");
-      assertTrue("Archives not matching", ret.isOutputOk);
+      assertTrue(ret.isOutputOk, "Archives not matching");
     } finally {
       if (fileSys != null) {
         fileSys.close();

+ 8 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java

@@ -28,10 +28,11 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * A JUnit test to test Mini Map-Reduce Cluster with Mini-DFS.
@@ -70,10 +71,10 @@ public class TestMiniMRWithDFSWithDistinctUsers {
       });
 
     rj.waitForCompletion();
-    Assert.assertEquals("SUCCEEDED", JobStatus.getJobRunState(rj.getJobState()));
+    assertEquals("SUCCEEDED", JobStatus.getJobRunState(rj.getJobState()));
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     dfs = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
 
@@ -98,7 +99,7 @@ public class TestMiniMRWithDFSWithDistinctUsers {
                            1, null, null, MR_UGI, mrConf);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     if (mr != null) { mr.shutdown();}
     if (dfs != null) { dfs.shutdown(); }

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java

@@ -25,12 +25,12 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 public class TestMultiFileInputFormat {
 

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java

@@ -28,11 +28,11 @@ import java.io.OutputStream;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * 

+ 10 - 12
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java

@@ -27,17 +27,17 @@ import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * This test checks whether the task caches are created and used properly.
  */
-@Ignore
+@Disabled
 public class TestMultipleLevelCaching {
   private static final int MAX_LEVEL = 5;
   final Path inDir = new Path("/cachetesting");
@@ -158,14 +158,12 @@ public class TestMultipleLevelCaching {
     }
     RunningJob job = launchJob(jobConf, in, out, numMaps, jobName);
     Counters counters = job.getCounters();
-    assertEquals("Number of local maps",
-            counters.getCounter(JobCounter.OTHER_LOCAL_MAPS), otherLocalMaps);
-    assertEquals("Number of Data-local maps",
-            counters.getCounter(JobCounter.DATA_LOCAL_MAPS),
-                                dataLocalMaps);
-    assertEquals("Number of Rack-local maps",
-            counters.getCounter(JobCounter.RACK_LOCAL_MAPS),
-                                rackLocalMaps);
+    assertEquals(counters.getCounter(JobCounter.OTHER_LOCAL_MAPS),
+        otherLocalMaps, "Number of local maps");
+    assertEquals(counters.getCounter(JobCounter.DATA_LOCAL_MAPS),
+        dataLocalMaps, "Number of Data-local maps");
+    assertEquals(counters.getCounter(JobCounter.RACK_LOCAL_MAPS),
+        rackLocalMaps, "Number of Rack-local maps");
     mr.waitUntilIdle();
     mr.shutdown();
   }

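Two conversions appear in TestMultipleLevelCaching: @Ignore becomes @Disabled, and the failure message moves from the first argument (JUnit 4) to the last (JUnit 5). The converted calls keep the original operand order, which leaves the counter value in the "expected" slot; the assertion passes or fails identically either way, but failure output labels the operands, so expected-first is the conventional order. A minimal sketch of the signature change, with illustrative values:

    import static org.junit.jupiter.api.Assertions.assertEquals;

    public class MessageLastExample {
      public static void main(String[] args) {
        int expected = 3;
        int actual = 1 + 2;
        // JUnit 4: assertEquals("sum mismatch", expected, actual);
        // JUnit 5: the message becomes the trailing parameter.
        assertEquals(expected, actual, "sum mismatch");
      }
    }
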
+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java

@@ -22,13 +22,13 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.File;
 import java.io.IOException;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class TestMultipleTextOutputFormat {
   private static JobConf defaultConf = new JobConf();

+ 24 - 15
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java

@@ -19,7 +19,10 @@
 package org.apache.hadoop.mapred;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -47,7 +50,8 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 public class TestNetworkedJob {
   private static String TEST_ROOT_DIR = new File(System.getProperty(
@@ -56,7 +60,8 @@ public class TestNetworkedJob {
   private static Path inFile = new Path(testDir, "in");
   private static Path outDir = new Path(testDir, "out");
 
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testGetNullCounters() throws Exception {
     //mock creation
     Job mockJob = mock(Job.class);
@@ -68,7 +73,8 @@ public class TestNetworkedJob {
     verify(mockJob).getCounters();
   }
   
-  @Test (timeout=500000)
+  @Test
+  @Timeout(value = 500)
   public void testGetJobStatus() throws IOException, InterruptedException,
       ClassNotFoundException {
     MiniMRClientCluster mr = null;
@@ -101,11 +107,11 @@ public class TestNetworkedJob {
 
       // The following asserts read JobStatus twice and ensure the returned
       // JobStatus objects correspond to the same Job.
-      assertEquals("Expected matching JobIDs", jobId, client.getJob(jobId)
-          .getJobStatus().getJobID());
-      assertEquals("Expected matching startTimes", rj.getJobStatus()
+      assertEquals(jobId, client.getJob(jobId)
+          .getJobStatus().getJobID(), "Expected matching JobIDs");
+      assertEquals(rj.getJobStatus()
           .getStartTime(), client.getJob(jobId).getJobStatus()
-          .getStartTime());
+          .getStartTime(), "Expected matching startTimes");
     } finally {
       if (fileSys != null) {
         fileSys.delete(testDir, true);
@@ -120,7 +126,8 @@ public class TestNetworkedJob {
  * @throws Exception
  */
   @SuppressWarnings( "deprecation" )
-  @Test (timeout=500000)
+  @Test
+  @Timeout(value = 500)
   public void testNetworkedJob() throws Exception {
     // mock creation
     MiniMRClientCluster mr = null;
@@ -252,10 +259,10 @@ public class TestNetworkedJob {
       // test JobClient
       // The following asserts read JobStatus twice and ensure the returned
       // JobStatus objects correspond to the same Job.
-      assertEquals("Expected matching JobIDs", jobId, client.getJob(jobId)
-          .getJobStatus().getJobID());
-      assertEquals("Expected matching startTimes", rj.getJobStatus()
-          .getStartTime(), client.getJob(jobId).getJobStatus().getStartTime());
+      assertEquals(jobId, client.getJob(jobId)
+          .getJobStatus().getJobID(), "Expected matching JobIDs");
+      assertEquals(rj.getJobStatus().getStartTime(),
+          client.getJob(jobId).getJobStatus().getStartTime(), "Expected matching startTimes");
     } finally {
       if (fileSys != null) {
         fileSys.delete(testDir, true);
@@ -271,7 +278,8 @@ public class TestNetworkedJob {
    * 
    * @throws IOException
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testBlackListInfo() throws IOException {
     BlackListInfo info = new BlackListInfo();
     info.setBlackListReport("blackListInfo");
@@ -293,7 +301,8 @@ public class TestNetworkedJob {
  *  test run from command line JobQueueClient
  * @throws Exception
  */
-  @Test (timeout=500000)
+  @Test
+  @Timeout(value = 500)
   public void testJobQueueClient() throws Exception {
         MiniMRClientCluster mr = null;
     FileSystem fileSys = null;

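The timeout conversions in TestNetworkedJob rely on a unit change: JUnit 4's @Test(timeout=...) takes milliseconds, while JUnit 5's @Timeout defaults to seconds, which is why timeout=5000 becomes @Timeout(value = 5) and timeout=500000 becomes @Timeout(value = 500). Sub-second budgets need an explicit unit. A hedged sketch; the test bodies are illustrative:

    import java.util.concurrent.TimeUnit;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    public class TimeoutExample {

      @Test
      @Timeout(value = 5)    // 5 seconds: the default unit
      public void finishesWithinFiveSeconds() throws InterruptedException {
        Thread.sleep(100);
      }

      @Test
      @Timeout(value = 500, unit = TimeUnit.MILLISECONDS)  // explicit unit
      public void finishesWithinHalfASecond() throws InterruptedException {
        Thread.sleep(100);
      }
    }
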
+ 16 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java

@@ -18,8 +18,7 @@
 
 package org.apache.hadoop.mapred;
 
-import org.junit.After;
-import org.junit.Assert;
+import org.junit.jupiter.api.AfterEach;
 
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -28,7 +27,7 @@ import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.test.GenericTestUtils;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -40,6 +39,12 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
 public class TestOldCombinerGrouping {
   private static File testRootDir = GenericTestUtils.getRandomizedTestDir();
 
@@ -119,7 +124,7 @@ public class TestOldCombinerGrouping {
 
   }
 
-  @After
+  @AfterEach
   public void cleanup() {
     FileUtil.fullyDelete(testRootDir);
   }
@@ -169,30 +174,30 @@ public class TestOldCombinerGrouping {
       long combinerOutputRecords = counters.getGroup(
           "org.apache.hadoop.mapreduce.TaskCounter").
           getCounter("COMBINE_OUTPUT_RECORDS");
-      Assert.assertTrue(combinerInputRecords > 0);
-      Assert.assertTrue(combinerInputRecords > combinerOutputRecords);
+      assertTrue(combinerInputRecords > 0);
+      assertTrue(combinerInputRecords > combinerOutputRecords);
 
       BufferedReader br = new BufferedReader(new FileReader(
           new File(out, "part-00000")));
       Set<String> output = new HashSet<String>();
       String line = br.readLine();
-      Assert.assertNotNull(line);
+      assertNotNull(line);
       output.add(line.substring(0, 1) + line.substring(4, 5));
       line = br.readLine();
-      Assert.assertNotNull(line);
+      assertNotNull(line);
       output.add(line.substring(0, 1) + line.substring(4, 5));
       line = br.readLine();
-      Assert.assertNull(line);
+      assertNull(line);
       br.close();
 
       Set<String> expected = new HashSet<String>();
       expected.add("A2");
       expected.add("B5");
 
-      Assert.assertEquals(expected, output);
+      assertEquals(expected, output);
 
     } else {
-      Assert.fail("Job failed");
+      fail("Job failed");
     }
   }
 

+ 6 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java

@@ -32,9 +32,11 @@ import org.apache.hadoop.util.XMLUtils;
 
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
-import static org.junit.Assert.*;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestQueueConfigurationParser {
 /**
@@ -42,7 +44,8 @@ public class TestQueueConfigurationParser {
  * @throws ParserConfigurationException
  * @throws Exception 
  */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testQueueConfigurationParser()
       throws ParserConfigurationException, Exception {
     JobQueueInfo info = new JobQueueInfo("root", "rootInfo");

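TestQueueConfigurationParser also shows the import-hygiene side of the change: wildcard static imports (import static org.junit.Assert.*;) are replaced by explicit ones, so each file declares exactly the assertions it uses. Illustrative only:

    // Before: import static org.junit.Assert.*;
    // After, explicit and JUnit 5:
    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class ExplicitImportExample {
      public static void main(String[] args) {
        assertTrue(2 + 2 == 4, "arithmetic sanity check");
      }
    }
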
+ 9 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java

@@ -19,10 +19,10 @@
 package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.mapreduce.TaskCounter;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestReduceFetch extends TestReduceFetchFromPartialMem {
 
@@ -44,10 +44,11 @@ public class TestReduceFetch extends TestReduceFetchFromPartialMem {
     Counters c = runJob(job);
     final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter();
     final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter();
-    assertTrue("Expected all records spilled during reduce (" + spill + ")",
-        spill >= 2 * out); // all records spill at map, reduce
-    assertTrue("Expected intermediate merges (" + spill + ")",
-        spill >= 2 * out + (out / MAP_TASKS)); // some records hit twice
+    assertTrue(spill >= 2 * out,
+        "Expected all records spilled during reduce (" +
+        spill + ")"); // all records spill at map, reduce
+    assertTrue(spill >= 2 * out + (out / MAP_TASKS),
+        "Expected intermediate merges (" + spill + ")"); // some records hit twice
   }
 
   /**
@@ -65,6 +66,6 @@ public class TestReduceFetch extends TestReduceFetchFromPartialMem {
     Counters c = runJob(job);
     final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter();
     final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter();
-    assertEquals("Spilled records: " + spill, out, spill); // no reduce spill
+    assertEquals(out, spill, "Spilled records: " + spill); // no reduce spill
   }
 }

+ 14 - 13
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java

@@ -27,9 +27,9 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.mapreduce.task.reduce.Fetcher;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -39,16 +39,16 @@ import java.util.Formatter;
 import java.util.Iterator;
 
 import static org.apache.hadoop.mapreduce.task.reduce.Fetcher.SHUFFLE_ERR_GRP_NAME;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class TestReduceFetchFromPartialMem {
 
   protected static MiniMRCluster mrCluster = null;
   protected static MiniDFSCluster dfsCluster = null;
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     Configuration conf = new Configuration();
     dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -56,7 +56,7 @@ public class TestReduceFetchFromPartialMem {
       dfsCluster.getFileSystem().getUri().toString(), 1);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     if (dfsCluster != null) { dfsCluster.shutdown(); }
     if (mrCluster != null) { mrCluster.shutdown(); }
@@ -87,8 +87,9 @@ public class TestReduceFetchFromPartialMem {
     Counters c = runJob(job);
     final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter();
     final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter();
-    assertTrue("Expected some records not spilled during reduce" + spill + ")",
-        spill < 2 * out); // spilled map records, some records at the reduce
+    assertTrue(spill < 2 * out,
+        "Expected some records not spilled during reduce (" + spill + ")");
+    // spilled map records, some records at the reduce
     long shuffleIoErrors =
         c.getGroup(SHUFFLE_ERR_GRP_NAME).getCounter(Fetcher.ShuffleErrors.IO_ERROR.toString());
     assertEquals(0, shuffleIoErrors);
@@ -226,8 +227,8 @@ public class TestReduceFetchFromPartialMem {
         out.collect(key, val);
         ++nRec;
       }
-      assertEquals("Bad rec count for " + key, recCheck, nRec - preRec);
-      assertEquals("Bad rec group for " + key, vcCheck, vc);
+      assertEquals(recCheck, nRec - preRec, "Bad rec count for " + key);
+      assertEquals(vcCheck, vc, "Bad rec group for " + key);
     }
 
     @Override
@@ -235,7 +236,7 @@ public class TestReduceFetchFromPartialMem {
       assertEquals(4095, nKey);
       assertEquals(nMaps - 1, aKey);
       assertEquals(nMaps - 1, bKey);
-      assertEquals("Bad record count", nMaps * (4096 + 2), nRec);
+      assertEquals(nMaps * (4096 + 2), nRec, "Bad record count");
     }
   }
 

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java

@@ -26,12 +26,12 @@ import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * This test exercises the ValueIterator.

+ 19 - 19
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java

@@ -31,11 +31,12 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * Tests the old mapred APIs with {@link Reporter#getProgress()}.
@@ -48,14 +49,14 @@ public class TestReporter {
   
   private static FileSystem fs = null;
 
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws Exception {
     fs = FileSystem.getLocal(new Configuration());
     fs.delete(testRootTempDir, true);
     fs.mkdirs(testRootTempDir);
   }
 
-  @AfterClass
+  @AfterAll
   public static void cleanup() throws Exception {
     fs.delete(testRootTempDir, true);
   }
@@ -92,16 +93,16 @@ public class TestReporter {
       float mapProgress = ((float)++numRecords)/INPUT_LINES;
       // calculate the attempt progress based on the progress range
       float attemptProgress = progressRange * mapProgress;
-      assertEquals("Invalid progress in map", 
-                   attemptProgress, reporter.getProgress(), 0f);
+      assertEquals(attemptProgress, reporter.getProgress(), 0f,
+          "Invalid progress in map");
       output.collect(new Text(value.toString() + numRecords), value);
     }
     
     @Override
     public void close() throws IOException {
       super.close();
-      assertEquals("Invalid progress in map cleanup", 
-                   progressRange, reporter.getProgress(), 0f);
+      assertEquals(progressRange, reporter.getProgress(), 0f,
+          "Invalid progress in map cleanup");
     }
   }
 
@@ -147,7 +148,7 @@ public class TestReporter {
                            1, 0, INPUT);
     job.waitForCompletion();
     
-    assertTrue("Job failed", job.isSuccessful());
+    assertTrue(job.isSuccessful(), "Job failed");
   }
   
   /**
@@ -175,18 +176,17 @@ public class TestReporter {
     throws IOException {
       float reducePhaseProgress = ((float)++recordCount)/INPUT_LINES;
       float weightedReducePhaseProgress = 
-              reducePhaseProgress * REDUCE_PROGRESS_RANGE;
-      assertEquals("Invalid progress in reduce", 
-                   SHUFFLE_PROGRESS_RANGE + weightedReducePhaseProgress, 
-                   reporter.getProgress(), 0.02f);
+          reducePhaseProgress * REDUCE_PROGRESS_RANGE;
+      assertEquals(SHUFFLE_PROGRESS_RANGE + weightedReducePhaseProgress,
+          reporter.getProgress(), 0.02f, "Invalid progress in reduce");
       this.reporter = reporter;
     }
     
     @Override
     public void close() throws IOException {
       super.close();
-      assertEquals("Invalid progress in reduce cleanup", 
-                   1.0f, reporter.getProgress(), 0f);
+      assertEquals(1.0f, reporter.getProgress(), 0f,
+          "Invalid progress in reduce cleanup");
     }
   }
   
@@ -210,7 +210,7 @@ public class TestReporter {
                            1, 1, INPUT);
     job.waitForCompletion();
     
-    assertTrue("Job failed", job.isSuccessful());
+    assertTrue(job.isSuccessful(), "Job failed");
   }
 
   @Test
@@ -244,7 +244,7 @@ public class TestReporter {
 
     job.waitForCompletion(true);
 
-    assertTrue("Job failed", job.isSuccessful());
+    assertTrue(job.isSuccessful(), "Job failed");
   }
 
 }

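TestReporter covers the class-level fixtures: @BeforeClass/@AfterClass become @BeforeAll/@AfterAll and, as in JUnit 4, must remain static under the default per-method test lifecycle. The delta-based float assertions in this file carry over unchanged apart from the message position, since Assertions also provides assertEquals(expected, actual, delta, message). A minimal sketch with illustrative names:

    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertEquals;

    public class ClassFixtureExample {
      private static String shared;

      @BeforeAll             // JUnit 4: @BeforeClass; must be static
      public static void setup() {
        shared = "initialized once for the class";
      }

      @Test
      public void testSharedFixture() {
        assertEquals("initialized once for the class", shared);
      }

      @AfterAll              // JUnit 4: @AfterClass; must be static
      public static void cleanup() {
        shared = null;
      }
    }
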
+ 37 - 35
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java

@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.junit.Assert;
-
 import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.mapreduce.JobStatus.State;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
@@ -41,9 +39,15 @@ import org.apache.hadoop.yarn.client.api.impl.YarnClientImpl;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.util.Records;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 public class TestResourceMgrDelegate {
 
@@ -53,13 +57,13 @@ public class TestResourceMgrDelegate {
    */
   @Test
   public void testGetRootQueues() throws IOException, InterruptedException {
-    final ApplicationClientProtocol applicationsManager = Mockito.mock(ApplicationClientProtocol.class);
-    GetQueueInfoResponse response = Mockito.mock(GetQueueInfoResponse.class);
+    final ApplicationClientProtocol applicationsManager = mock(ApplicationClientProtocol.class);
+    GetQueueInfoResponse response = mock(GetQueueInfoResponse.class);
     org.apache.hadoop.yarn.api.records.QueueInfo queueInfo =
-      Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
-    Mockito.when(response.getQueueInfo()).thenReturn(queueInfo);
+        mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
+    when(response.getQueueInfo()).thenReturn(queueInfo);
     try {
-      Mockito.when(applicationsManager.getQueueInfo(Mockito.any(
+      when(applicationsManager.getQueueInfo(any(
         GetQueueInfoRequest.class))).thenReturn(response);
     } catch (YarnException e) {
       throw new IOException(e);
@@ -69,7 +73,7 @@ public class TestResourceMgrDelegate {
       new YarnConfiguration()) {
       @Override
       protected void serviceStart() throws Exception {
-        Assert.assertTrue(this.client instanceof YarnClientImpl);
+        assertTrue(this.client instanceof YarnClientImpl);
         ((YarnClientImpl) this.client).setRMClient(applicationsManager);
       }
     };
@@ -78,21 +82,22 @@ public class TestResourceMgrDelegate {
     ArgumentCaptor<GetQueueInfoRequest> argument =
       ArgumentCaptor.forClass(GetQueueInfoRequest.class);
     try {
-      Mockito.verify(applicationsManager).getQueueInfo(
+      verify(applicationsManager).getQueueInfo(
         argument.capture());
     } catch (YarnException e) {
       throw new IOException(e);
     }
 
-    Assert.assertTrue("Children of root queue not requested",
-      argument.getValue().getIncludeChildQueues());
-    Assert.assertTrue("Request wasn't to recurse through children",
-      argument.getValue().getRecursive());
+    assertTrue(argument.getValue().getIncludeChildQueues(),
+        "Children of root queue not requested");
+    assertTrue(argument.getValue().getRecursive(),
+        "Request wasn't to recurse through children");
   }
 
   @Test
   public void tesAllJobs() throws Exception {
-    final ApplicationClientProtocol applicationsManager = Mockito.mock(ApplicationClientProtocol.class);
+    final ApplicationClientProtocol applicationsManager =
+        mock(ApplicationClientProtocol.class);
     GetApplicationsResponse allApplicationsResponse = Records
         .newRecord(GetApplicationsResponse.class);
     List<ApplicationReport> applications = new ArrayList<ApplicationReport>();
@@ -105,45 +110,42 @@ public class TestResourceMgrDelegate {
     applications.add(getApplicationReport(YarnApplicationState.FAILED,
         FinalApplicationStatus.FAILED));
     allApplicationsResponse.setApplicationList(applications);
-    Mockito.when(
-        applicationsManager.getApplications(Mockito
-            .any(GetApplicationsRequest.class))).thenReturn(
-        allApplicationsResponse);
+    when(applicationsManager.getApplications(any(GetApplicationsRequest.class)))
+        .thenReturn(allApplicationsResponse);
     ResourceMgrDelegate resourceMgrDelegate = new ResourceMgrDelegate(
       new YarnConfiguration()) {
       @Override
       protected void serviceStart() throws Exception {
-        Assert.assertTrue(this.client instanceof YarnClientImpl);
+        assertTrue(this.client instanceof YarnClientImpl);
         ((YarnClientImpl) this.client).setRMClient(applicationsManager);
       }
     };
     JobStatus[] allJobs = resourceMgrDelegate.getAllJobs();
 
-    Assert.assertEquals(State.FAILED, allJobs[0].getState());
-    Assert.assertEquals(State.SUCCEEDED, allJobs[1].getState());
-    Assert.assertEquals(State.KILLED, allJobs[2].getState());
-    Assert.assertEquals(State.FAILED, allJobs[3].getState());
+    assertEquals(State.FAILED, allJobs[0].getState());
+    assertEquals(State.SUCCEEDED, allJobs[1].getState());
+    assertEquals(State.KILLED, allJobs[2].getState());
+    assertEquals(State.FAILED, allJobs[3].getState());
   }
 
   private ApplicationReport getApplicationReport(
       YarnApplicationState yarnApplicationState,
       FinalApplicationStatus finalApplicationStatus) {
-    ApplicationReport appReport = Mockito.mock(ApplicationReport.class);
-    ApplicationResourceUsageReport appResources = Mockito
-        .mock(ApplicationResourceUsageReport.class);
-    Mockito.when(appReport.getApplicationId()).thenReturn(
+    ApplicationReport appReport = mock(ApplicationReport.class);
+    ApplicationResourceUsageReport appResources = mock(ApplicationResourceUsageReport.class);
+    when(appReport.getApplicationId()).thenReturn(
         ApplicationId.newInstance(0, 0));
-    Mockito.when(appResources.getNeededResources()).thenReturn(
+    when(appResources.getNeededResources()).thenReturn(
         Records.newRecord(Resource.class));
-    Mockito.when(appResources.getReservedResources()).thenReturn(
+    when(appResources.getReservedResources()).thenReturn(
         Records.newRecord(Resource.class));
-    Mockito.when(appResources.getUsedResources()).thenReturn(
+    when(appResources.getUsedResources()).thenReturn(
         Records.newRecord(Resource.class));
-    Mockito.when(appReport.getApplicationResourceUsageReport()).thenReturn(
+    when(appReport.getApplicationResourceUsageReport()).thenReturn(
         appResources);
-    Mockito.when(appReport.getYarnApplicationState()).thenReturn(
+    when(appReport.getYarnApplicationState()).thenReturn(
         yarnApplicationState);
-    Mockito.when(appReport.getFinalApplicationStatus()).thenReturn(
+    when(appReport.getFinalApplicationStatus()).thenReturn(
         finalApplicationStatus);
 
     return appReport;

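The Mockito edits in TestResourceMgrDelegate are independent of the JUnit upgrade: mock, when, verify, and any are static-imported to shorten call sites, with no behavioral change. A sketch of the same convention, assuming mockito-core on the classpath; the mocked type and stubbed values are illustrative:

    import java.util.List;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;
    import static org.mockito.Mockito.when;

    public class MockitoStaticImportExample {

      @SuppressWarnings("unchecked")
      public static void main(String[] args) {
        List<String> queue = mock(List.class);    // was: Mockito.mock(...)
        when(queue.get(0)).thenReturn("root");    // was: Mockito.when(...)

        assertEquals("root", queue.get(0));
        verify(queue).get(0);                     // was: Mockito.verify(...)
      }
    }
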
+ 8 - 12
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java

@@ -25,13 +25,13 @@ import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.slf4j.Logger;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestSequenceFileAsBinaryInputFormat {
   private static final Logger LOG = FileInputFormat.LOG;
@@ -88,21 +88,17 @@ public class TestSequenceFileAsBinaryInputFormat {
           cmpkey.readFields(buf);
           buf.reset(bval.getBytes(), bval.getLength());
           cmpval.readFields(buf);
-          assertTrue(
-              "Keys don't match: " + "*" + cmpkey.toString() + ":" +
-                                           tkey.toString() + "*",
-              cmpkey.toString().equals(tkey.toString()));
-          assertTrue(
-              "Vals don't match: " + "*" + cmpval.toString() + ":" +
-                                           tval.toString() + "*",
-              cmpval.toString().equals(tval.toString()));
+          assertTrue(cmpkey.toString().equals(tkey.toString()),
+              "Keys don't match: " + "*" + cmpkey.toString() + ":" + tkey.toString() + "*");
+          assertTrue(cmpval.toString().equals(tval.toString()),
+              "Vals don't match: " + "*" + cmpval.toString() + ":" + tval.toString() + "*");
           ++count;
         }
       } finally {
         reader.close();
       }
     }
-    assertEquals("Some records not found", RECORDS, count);
+    assertEquals(RECORDS, count, "Some records not found");
   }
 
 }

+ 18 - 25
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java

@@ -30,13 +30,13 @@ import org.apache.hadoop.io.DoubleWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class TestSequenceFileAsBinaryOutputFormat {
   private static final Logger LOG =
@@ -122,10 +122,8 @@ public class TestSequenceFileAsBinaryOutputFormat {
         while (reader.next(iwritable, dwritable)) {
           sourceInt = r.nextInt();
           sourceDouble = r.nextDouble();
-          assertEquals(
-              "Keys don't match: " + "*" + iwritable.get() + ":" + 
-                                           sourceInt + "*",
-              sourceInt, iwritable.get());
+          assertEquals(sourceInt, iwritable.get(),
+              "Keys don't match: " + "*" + iwritable.get() + ":" + sourceInt + "*");
           assertThat(dwritable.get()).withFailMessage(
               "Vals don't match: " + "*" + dwritable.get() + ":" +
                   sourceDouble + "*")
@@ -136,7 +134,7 @@ public class TestSequenceFileAsBinaryOutputFormat {
         reader.close();
       }
     }
-    assertEquals("Some records not found", RECORDS, count);
+    assertEquals(RECORDS, count, "Some records not found");
   }
 
   @Test
@@ -149,29 +147,24 @@ public class TestSequenceFileAsBinaryOutputFormat {
     job.setOutputKeyClass(FloatWritable.class);
     job.setOutputValueClass(BooleanWritable.class);
 
-    assertEquals("SequenceFileOutputKeyClass should default to ouputKeyClass", 
-             FloatWritable.class,
-             SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(
-                                                                         job));
-    assertEquals("SequenceFileOutputValueClass should default to " 
-             + "ouputValueClass", 
-             BooleanWritable.class,
-             SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(
-                                                                         job));
+    assertEquals(FloatWritable.class,
+        SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(job),
+        "SequenceFileOutputKeyClass should default to outputKeyClass");
+    assertEquals(BooleanWritable.class,
+        SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job),
+        "SequenceFileOutputValueClass should default to outputValueClass");
 
     SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, 
                                           IntWritable.class );
     SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, 
                                           DoubleWritable.class ); 
 
-    assertEquals("SequenceFileOutputKeyClass not updated", 
-             IntWritable.class,
-             SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(
-                                                                         job));
-    assertEquals("SequenceFileOutputValueClass not updated", 
-             DoubleWritable.class,
-             SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(
-                                                                         job));
+    assertEquals(IntWritable.class,
+        SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(job),
+        "SequenceFileOutputKeyClass not updated");
+    assertEquals(DoubleWritable.class,
+        SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job),
+        "SequenceFileOutputValueClass not updated");
   }
 
   @Test

+ 7 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java

@@ -26,13 +26,13 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.slf4j.Logger;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.BitSet;
 import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 public class TestSequenceFileAsTextInputFormat {
   private static final Logger LOG = FileInputFormat.LOG;
@@ -94,7 +94,8 @@ public class TestSequenceFileAsTextInputFormat {
           RecordReader<Text, Text> reader =
             format.getRecordReader(splits[j], job, reporter);
           Class readerClass = reader.getClass();
-          assertEquals("reader class is SequenceFileAsTextRecordReader.", SequenceFileAsTextRecordReader.class, readerClass);        
+          assertEquals(SequenceFileAsTextRecordReader.class, readerClass,
+              "reader class should be SequenceFileAsTextRecordReader.");
           Text value = reader.createValue();
           Text key = reader.createKey();
           try {
@@ -105,7 +106,7 @@ public class TestSequenceFileAsTextInputFormat {
               // LOG.info("@"+reader.getPos());
               // }
               int keyInt = Integer.parseInt(key.toString());
-              assertFalse("Key in multiple partitions.", bits.get(keyInt));
+              assertFalse(bits.get(keyInt), "Key in multiple partitions.");
               bits.set(keyInt);
               count++;
             }
@@ -114,7 +115,7 @@ public class TestSequenceFileAsTextInputFormat {
             reader.close();
           }
         }
-        assertEquals("Some keys in no partition.", length, bits.cardinality());
+        assertEquals(length, bits.cardinality(), "Some keys in no partition.");
       }
 
     }

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java

@@ -25,13 +25,13 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.slf4j.Logger;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.util.Random;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestSequenceFileInputFilter {
   private static final Logger LOG = FileInputFormat.LOG;

+ 5 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java

@@ -25,13 +25,13 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.slf4j.Logger;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.BitSet;
 import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 public class TestSequenceFileInputFormat {
   private static final Logger LOG = FileInputFormat.LOG;
@@ -102,7 +102,7 @@ public class TestSequenceFileInputFormat {
               // LOG.info("splits["+j+"]="+splits[j]+" : " + key.get());
               // LOG.info("@"+reader.getPos());
               // }
-              assertFalse("Key in multiple partitions.", bits.get(key.get()));
+              assertFalse(bits.get(key.get()), "Key in multiple partitions.");
               bits.set(key.get());
               count++;
             }
@@ -111,7 +111,7 @@ public class TestSequenceFileInputFormat {
             reader.close();
           }
         }
-        assertEquals("Some keys in no partition.", length, bits.cardinality());
+        assertEquals(length, bits.cardinality(), "Some keys in no partition.");
       }
 
     }

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java

@@ -18,13 +18,13 @@
 package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.mapred.SortedRanges.Range;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Iterator;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestSortedRanges {
   private static final Logger LOG =

+ 5 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java

@@ -30,12 +30,12 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 /**
  * A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
@@ -85,7 +85,8 @@ public class TestSpecialCharactersInOutputPath {
     try {
       assertTrue(runningJob.isComplete());
       assertTrue(runningJob.isSuccessful());
-      assertTrue("Output folder not found!", fs.exists(new Path("/testing/output/" + OUTPUT_FILENAME)));
+      assertTrue(fs.exists(new Path("/testing/output/" + OUTPUT_FILENAME)),
+          "Output folder not found!");
     } catch (NullPointerException npe) {
      // This NPE should no longer happen
       fail("A NPE should not have happened.");

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java

@@ -21,12 +21,12 @@ import java.util.Map;
 
 import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
 import org.apache.hadoop.mapred.StatisticsCollector.Stat;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
 public class TestStatisticsCollector {
 

+ 16 - 16
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java

@@ -25,16 +25,16 @@ import org.apache.hadoop.ipc.ProtocolSignature;
 import org.apache.hadoop.mapred.SortedRanges.Range;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.File;
 import java.io.IOException;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 
 
@@ -86,7 +86,7 @@ public class TestTaskCommit extends HadoopTestCase {
     super(LOCAL_MR, LOCAL_FS, 1, 1);
   }
   
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     super.tearDown();
     FileUtil.fullyDelete(new File(rootDir.toString()));
@@ -250,43 +250,43 @@ public class TestTaskCommit extends HadoopTestCase {
     task.setTaskCleanupTask();
     MyUmbilical umbilical = new MyUmbilical();
     task.run(job, umbilical);
-    assertTrue("Task did not succeed", umbilical.taskDone);
+    assertTrue(umbilical.taskDone, "Task did not succeed");
   }
 
   @Test
   public void testCommitRequiredForMapTask() throws Exception {
     Task testTask = createDummyTask(TaskType.MAP);
-    assertTrue("MapTask should need commit", testTask.isCommitRequired());
+    assertTrue(testTask.isCommitRequired(), "MapTask should need commit");
   }
 
   @Test
   public void testCommitRequiredForReduceTask() throws Exception {
     Task testTask = createDummyTask(TaskType.REDUCE);
-    assertTrue("ReduceTask should need commit", testTask.isCommitRequired());
+    assertTrue(testTask.isCommitRequired(), "ReduceTask should need commit");
   }
 
   @Test
   public void testCommitNotRequiredForJobSetup() throws Exception {
     Task testTask = createDummyTask(TaskType.MAP);
     testTask.setJobSetupTask();
-    assertFalse("Job setup task should not need commit", 
-        testTask.isCommitRequired());
+    assertFalse(testTask.isCommitRequired(),
+        "Job setup task should not need commit");
   }
 
   @Test
   public void testCommitNotRequiredForJobCleanup() throws Exception {
     Task testTask = createDummyTask(TaskType.MAP);
     testTask.setJobCleanupTask();
-    assertFalse("Job cleanup task should not need commit", 
-        testTask.isCommitRequired());
+    assertFalse(testTask.isCommitRequired(),
+        "Job cleanup task should not need commit");
   }
 
   @Test
   public void testCommitNotRequiredForTaskCleanup() throws Exception {
     Task testTask = createDummyTask(TaskType.REDUCE);
     testTask.setTaskCleanupTask();
-    assertFalse("Task cleanup task should not need commit", 
-        testTask.isCommitRequired());
+    assertFalse(testTask.isCommitRequired(),
+        "Task cleanup task should not need commit");
   }
 
   private Task createDummyTask(TaskType type) throws IOException, ClassNotFoundException,

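TestTaskCommit keeps the explicit super.tearDown() call in its @AfterEach method because it extends HadoopTestCase. JUnit 5 runs inherited lifecycle methods only when they are not overridden; once a subclass overrides an annotated method, only the override is invoked, so the super call is what keeps the base-class cleanup running. A minimal sketch of that interaction (class names are illustrative):

    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertTrue;

    class BaseFixture {
      protected boolean resourceOpen;

      @AfterEach
      public void tearDown() throws Exception {
        resourceOpen = false;        // base-class cleanup
      }
    }

    public class OverridingFixtureExample extends BaseFixture {

      @Test
      public void testUsesResource() {
        resourceOpen = true;
        assertTrue(resourceOpen);
      }

      @AfterEach
      @Override
      public void tearDown() throws Exception {
        super.tearDown();            // without this, only the override runs
        // subclass-specific cleanup would go here
      }
    }
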
+ 18 - 18
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.mapred;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestTaskPerformanceSplits {
   @Test
@@ -40,15 +40,15 @@ public class TestTaskPerformanceSplits {
       System.err.println("segment i = " + results[i]);
     }
 
-    assertEquals("Bad interpolation in cumulative segment 0", 200, results[0]);
-    assertEquals("Bad interpolation in cumulative segment 1", 200, results[1]);
-    assertEquals("Bad interpolation in cumulative segment 2", 200, results[2]);
-    assertEquals("Bad interpolation in cumulative segment 3", 300, results[3]);
-    assertEquals("Bad interpolation in cumulative segment 4", 400, results[4]);
-    assertEquals("Bad interpolation in cumulative segment 5", 2200, results[5]);
+    assertEquals(200, results[0], "Bad interpolation in cumulative segment 0");
+    assertEquals(200, results[1], "Bad interpolation in cumulative segment 1");
+    assertEquals(200, results[2], "Bad interpolation in cumulative segment 2");
+    assertEquals(300, results[3], "Bad interpolation in cumulative segment 3");
+    assertEquals(400, results[4], "Bad interpolation in cumulative segment 4");
+    assertEquals(2200, results[5], "Bad interpolation in cumulative segment 5");
     // these are rounded down
-    assertEquals("Bad interpolation in cumulative segment 6", 2200, results[6]);
-    assertEquals("Bad interpolation in cumulative segment 7", 2201, results[7]);
+    assertEquals(2200, results[6], "Bad interpolation in cumulative segment 6");
+    assertEquals(2201, results[7], "Bad interpolation in cumulative segment 7");
 
     status.extend(0.0D, 0);
     status.extend(1.0D/16.0D, 300); // + 75 for bucket 0
@@ -59,13 +59,13 @@ public class TestTaskPerformanceSplits {
 
     results = status.getValues();
 
-    assertEquals("Bad interpolation in status segment 0", 275, results[0]);
-    assertEquals("Bad interpolation in status segment 1", 750, results[1]);
-    assertEquals("Bad interpolation in status segment 2", 1500, results[2]);
-    assertEquals("Bad interpolation in status segment 3", 2175, results[3]);
-    assertEquals("Bad interpolation in status segment 4", 2100, results[4]);
-    assertEquals("Bad interpolation in status segment 5", 1900, results[5]);
-    assertEquals("Bad interpolation in status segment 6", 1700, results[6]);
-    assertEquals("Bad interpolation in status segment 7", 1500, results[7]);
+    assertEquals(275, results[0], "Bad interpolation in status segment 0");
+    assertEquals(750, results[1], "Bad interpolation in status segment 1");
+    assertEquals(1500, results[2], "Bad interpolation in status segment 2");
+    assertEquals(2175, results[3], "Bad interpolation in status segment 3");
+    assertEquals(2100, results[4], "Bad interpolation in status segment 4");
+    assertEquals(1900, results[5], "Bad interpolation in status segment 5");
+    assertEquals(1700, results[6], "Bad interpolation in status segment 6");
+    assertEquals(1500, results[7], "Bad interpolation in status segment 7");
   }
 }

+ 44 - 45
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestTaskStatus {
 
@@ -52,25 +52,24 @@ public class TestTaskStatus {
     // first try to set the finish time before
     // start time is set.
     status.setFinishTime(currentTime);
-    assertEquals("Finish time of the task status set without start time", 0,
-        status.getFinishTime());
+    assertEquals(0, status.getFinishTime(),
+        "Finish time of the task status set without start time");
     // Now set the start time to right time.
     status.setStartTime(currentTime);
-    assertEquals("Start time of the task status not set correctly.",
-        currentTime, status.getStartTime());
+    assertEquals(currentTime, status.getStartTime(),
+        "Start time of the task status not set correctly.");
     // try setting wrong start time to task status.
     long wrongTime = -1;
     status.setStartTime(wrongTime);
-    assertEquals(
-        "Start time of the task status is set to wrong negative value",
-        currentTime, status.getStartTime());
+    assertEquals(currentTime, status.getStartTime(),
+        "Start time of the task status is set to wrong negative value");
     // finally try setting wrong finish time i.e. negative value.
     status.setFinishTime(wrongTime);
-    assertEquals("Finish time of task status is set to wrong negative value",
-        0, status.getFinishTime());
+    assertEquals(0, status.getFinishTime(),
+        "Finish time of task status is set to wrong negative value");
     status.setFinishTime(currentTime);
-    assertEquals("Finish time of the task status not set correctly.",
-        currentTime, status.getFinishTime());
+    assertEquals(currentTime, status.getFinishTime(),
+        "Finish time of the task status not set correctly.");
     
     // test with null task-diagnostics
     TaskStatus ts = ((TaskStatus)status.clone());
@@ -117,19 +116,19 @@ public class TestTaskStatus {
         return false;
       }
     };
-    assertEquals("Small diagnostic info test failed", 
-                 status.getDiagnosticInfo(), test);
-    assertEquals("Small state string test failed", status.getStateString(), 
-                 test);
+    assertEquals(status.getDiagnosticInfo(), test,
+        "Small diagnostic info test failed");
+    assertEquals(status.getStateString(), test,
+        "Small state string test failed");
     
     // now append some small string and check
     String newDInfo = test.concat(test);
     status.setDiagnosticInfo(test);
     status.setStateString(newDInfo);
-    assertEquals("Small diagnostic info append failed", 
-                 newDInfo, status.getDiagnosticInfo());
-    assertEquals("Small state-string append failed", 
-                 newDInfo, status.getStateString());
+    assertEquals(newDInfo, status.getDiagnosticInfo(),
+        "Small diagnostic info append failed");
+    assertEquals(newDInfo, status.getStateString(),
+        "Small state-string append failed");
     
     // update the status with small state strings
     TaskStatus newStatus = (TaskStatus)status.clone();
@@ -138,47 +137,47 @@ public class TestTaskStatus {
     status.statusUpdate(newStatus);
     newDInfo = newDInfo.concat(newStatus.getDiagnosticInfo());
     
-    assertEquals("Status-update on diagnostic-info failed", 
-                 newDInfo, status.getDiagnosticInfo());
-    assertEquals("Status-update on state-string failed", 
-                 newSInfo, status.getStateString());
+    assertEquals(newDInfo, status.getDiagnosticInfo(),
+        "Status-update on diagnostic-info failed");
+    assertEquals(newSInfo, status.getStateString(),
+        "Status-update on state-string failed");
     
     newSInfo = "hi2";
     status.statusUpdate(0, newSInfo, null);
-    assertEquals("Status-update on state-string failed", 
-                 newSInfo, status.getStateString());
+    assertEquals(newSInfo, status.getStateString(),
+        "Status-update on state-string failed");
     
     newSInfo = "hi3";
     status.statusUpdate(null, 0, newSInfo, null, 0);
-    assertEquals("Status-update on state-string failed", 
-                 newSInfo, status.getStateString());
+    assertEquals(newSInfo, status.getStateString(),
+        "Status-update on state-string failed");
     
     
     // now append each with large string
     String large = "hihihihihihihihihihi"; // 20 chars
     status.setDiagnosticInfo(large);
     status.setStateString(large);
-    assertEquals("Large diagnostic info append test failed", 
-                 maxSize, status.getDiagnosticInfo().length());
-    assertEquals("Large state-string append test failed",
-                 maxSize, status.getStateString().length());
+    assertEquals(maxSize, status.getDiagnosticInfo().length(),
+        "Large diagnostic info append test failed");
+    assertEquals(maxSize, status.getStateString().length(),
+        "Large state-string append test failed");
     
     // update a large status with large strings
     newStatus.setDiagnosticInfo(large + "0");
     newStatus.setStateString(large + "1");
     status.statusUpdate(newStatus);
-    assertEquals("Status-update on diagnostic info failed",
-                 maxSize, status.getDiagnosticInfo().length());
-    assertEquals("Status-update on state-string failed", 
-                 maxSize, status.getStateString().length());
+    assertEquals(maxSize, status.getDiagnosticInfo().length(),
+        "Status-update on diagnostic info failed");
+    assertEquals(maxSize, status.getStateString().length(),
+        "Status-update on state-string failed");
     
     status.statusUpdate(0, large + "2", null);
-    assertEquals("Status-update on state-string failed", 
-                 maxSize, status.getStateString().length());
+    assertEquals(maxSize, status.getStateString().length(),
+        "Status-update on state-string failed");
     
     status.statusUpdate(null, 0, large + "3", null, 0);
-    assertEquals("Status-update on state-string failed", 
-                 maxSize, status.getStateString().length());
+    assertEquals(maxSize, status.getStateString().length(),
+        "Status-update on state-string failed");
     
     // test passing large string in constructor
     status = new TaskStatus(null, 0, 0, null, large, large, null, null, 
@@ -197,9 +196,9 @@ public class TestTaskStatus {
         return false;
       }
     };
-    assertEquals("Large diagnostic info test failed", 
-                maxSize, status.getDiagnosticInfo().length());
-    assertEquals("Large state-string test failed", 
-                 maxSize, status.getStateString().length());
+    assertEquals(maxSize, status.getDiagnosticInfo().length(),
+        "Large diagnostic info test failed");
+    assertEquals(maxSize, status.getStateString().length(),
+        "Large state-string test failed");
   }
 }

+ 69 - 58
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java

@@ -38,12 +38,15 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestTextInputFormat {
   private static final Logger LOG =
@@ -66,7 +69,8 @@ public class TestTextInputFormat {
       System.getProperty("test.build.data", "/tmp"),
       "TestTextInputFormat"));
 
-  @Test (timeout=500000)
+  @Test
+  @Timeout(value = 500)
   public void testFormat() throws Exception {
     JobConf job = new JobConf(defaultConf);
     Path file = new Path(workDir, "test.txt");
@@ -110,9 +114,9 @@ public class TestTextInputFormat {
         LOG.debug("splitting: got =        " + splits.length);
 
         if (length == 0) {
-           assertEquals("Files of length 0 are not returned from FileInputFormat.getSplits().", 
-                        1, splits.length);
-           assertEquals("Empty file length == 0", 0, splits[0].getLength());
+          assertEquals(1, splits.length,
+              "Files of length 0 are not returned from FileInputFormat.getSplits().");
+          assertEquals(0, splits[0].getLength(), "Empty file length == 0");
         }
 
         // check each split
@@ -131,7 +135,7 @@ public class TestTextInputFormat {
                          " in split " + j +
                          " at position "+reader.getPos());
               }
-              assertFalse("Key in multiple partitions.", bits.get(v));
+              assertFalse(bits.get(v), "Key in multiple partitions.");
               bits.set(v);
               count++;
             }
@@ -140,13 +144,14 @@ public class TestTextInputFormat {
             reader.close();
           }
         }
-        assertEquals("Some keys in no partition.", length, bits.cardinality());
+        assertEquals(length, bits.cardinality(), "Some keys in no partition.");
       }
 
     }
   }
 
-  @Test (timeout=900000)
+  @Test
+  @Timeout(value = 900)
   public void testSplitableCodecs() throws IOException {
     JobConf conf = new JobConf(defaultConf);
     int seed = new Random().nextInt();
@@ -195,7 +200,8 @@ public class TestTextInputFormat {
   }
 
   // Test a corner case when position of stream is right after BZip2 marker
-  @Test (timeout=900000)
+  @Test
+  @Timeout(value = 900)
   public void testSplitableCodecs2() throws IOException {
     JobConf conf = new JobConf(defaultConf);
     // Create the codec
@@ -253,7 +259,7 @@ public class TestTextInputFormat {
               LOG.warn("conflict with " + v + " in split " + j +
                   " at position " + reader.getPos());
             }
-            assertFalse("Key in multiple partitions.", bits.get(v));
+            assertFalse(bits.get(v), "Key in multiple partitions.");
             bits.set(v);
             counter++;
           }
@@ -266,7 +272,7 @@ public class TestTextInputFormat {
           reader.close();
         }
       }
-      assertEquals("Some keys in no partition.", length, bits.cardinality());
+      assertEquals(length, bits.cardinality(), "Some keys in no partition.");
     }
   }
 
@@ -314,7 +320,7 @@ public class TestTextInputFormat {
                     " in split " + j +
                     " at position "+reader.getPos());
           }
-          assertFalse("Key in multiple partitions.", bits.get(v));
+          assertFalse(bits.get(v), "Key in multiple partitions.");
           bits.set(v);
           counter++;
         }
@@ -327,7 +333,7 @@ public class TestTextInputFormat {
         reader.close();
       }
     }
-    assertEquals("Some keys in no partition.", length, bits.cardinality());
+    assertEquals(length, bits.cardinality(), "Some keys in no partition.");
   }
 
   private static LineReader makeStream(String str) throws IOException {
@@ -337,16 +343,17 @@ public class TestTextInputFormat {
     return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), bufsz);
   }
 
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testUTF8() throws Exception {
     LineReader in = makeStream("abcd\u20acbdcd\u20ac");
     Text line = new Text();
     in.readLine(line);
-    assertEquals("readLine changed utf8 characters", 
-                 "abcd\u20acbdcd\u20ac", line.toString());
+    assertEquals("abcd\u20acbdcd\u20ac", line.toString(),
+        "readLine changed utf8 characters");
     in = makeStream("abc\u200axyz");
     in.readLine(line);
-    assertEquals("split on fake newline", "abc\u200axyz", line.toString());
+    assertEquals("abc\u200axyz", line.toString(), "split on fake newline");
   }
 
   /**
@@ -356,7 +363,8 @@ public class TestTextInputFormat {
    *
    * @throws Exception
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testNewLines() throws Exception {
     final String STR = "a\nbb\n\nccc\rdddd\r\r\r\n\r\neeeee";
     final int STRLENBYTES = STR.getBytes().length;
@@ -365,25 +373,25 @@ public class TestTextInputFormat {
       LineReader in = makeStream(STR, bufsz);
       int c = 0;
       c += in.readLine(out); //"a"\n
-      assertEquals("line1 length, bufsz:"+bufsz, 1, out.getLength());
+      assertEquals(1, out.getLength(), "line1 length, bufsz:"+bufsz);
       c += in.readLine(out); //"bb"\n
-      assertEquals("line2 length, bufsz:"+bufsz, 2, out.getLength());
+      assertEquals(2, out.getLength(), "line2 length, bufsz:"+bufsz);
       c += in.readLine(out); //""\n
-      assertEquals("line3 length, bufsz:"+bufsz, 0, out.getLength());
+      assertEquals(0, out.getLength(), "line3 length, bufsz:"+bufsz);
       c += in.readLine(out); //"ccc"\r
-      assertEquals("line4 length, bufsz:"+bufsz, 3, out.getLength());
+      assertEquals(3, out.getLength(), "line4 length, bufsz:"+bufsz);
       c += in.readLine(out); //dddd\r
-      assertEquals("line5 length, bufsz:"+bufsz, 4, out.getLength());
+      assertEquals(4, out.getLength(), "line5 length, bufsz:"+bufsz);
       c += in.readLine(out); //""\r
-      assertEquals("line6 length, bufsz:"+bufsz, 0, out.getLength());
+      assertEquals(0, out.getLength(), "line6 length, bufsz:"+bufsz);
       c += in.readLine(out); //""\r\n
-      assertEquals("line7 length, bufsz:"+bufsz, 0, out.getLength());
+      assertEquals(0, out.getLength(), "line7 length, bufsz:"+bufsz);
       c += in.readLine(out); //""\r\n
-      assertEquals("line8 length, bufsz:"+bufsz, 0, out.getLength());
+      assertEquals(0, out.getLength(), "line8 length, bufsz:"+bufsz);
       c += in.readLine(out); //"eeeee"EOF
-      assertEquals("line9 length, bufsz:"+bufsz, 5, out.getLength());
-      assertEquals("end of file, bufsz: "+bufsz, 0, in.readLine(out));
-      assertEquals("total bytes, bufsz: "+bufsz, c, STRLENBYTES);
+      assertEquals(5, out.getLength(), "line9 length, bufsz:"+bufsz);
+      assertEquals(0, in.readLine(out), "end of file, bufsz: "+bufsz);
+      assertEquals(c, STRLENBYTES, "total bytes, bufsz: "+bufsz);
     }
   }
 
@@ -396,7 +404,8 @@ public class TestTextInputFormat {
    *
    * @throws Exception
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testMaxLineLength() throws Exception {
     final String STR = "a\nbb\n\nccc\rdddd\r\neeeee";
     final int STRLENBYTES = STR.getBytes().length;
@@ -405,23 +414,24 @@ public class TestTextInputFormat {
       LineReader in = makeStream(STR, bufsz);
       int c = 0;
       c += in.readLine(out, 1);
-      assertEquals("line1 length, bufsz: "+bufsz, 1, out.getLength());
+      assertEquals(1, out.getLength(), "line1 length, bufsz: "+bufsz);
       c += in.readLine(out, 1);
-      assertEquals("line2 length, bufsz: "+bufsz, 1, out.getLength());
+      assertEquals(1, out.getLength(), "line2 length, bufsz: "+bufsz);
       c += in.readLine(out, 1);
-      assertEquals("line3 length, bufsz: "+bufsz, 0, out.getLength());
+      assertEquals(0, out.getLength(), "line3 length, bufsz: "+bufsz);
       c += in.readLine(out, 3);
-      assertEquals("line4 length, bufsz: "+bufsz, 3, out.getLength());
+      assertEquals(3, out.getLength(), "line4 length, bufsz: "+bufsz);
       c += in.readLine(out, 10);
-      assertEquals("line5 length, bufsz: "+bufsz, 4, out.getLength());
+      assertEquals(4, out.getLength(), "line5 length, bufsz: "+bufsz);
       c += in.readLine(out, 8);
-      assertEquals("line5 length, bufsz: "+bufsz, 5, out.getLength());
-      assertEquals("end of file, bufsz: " +bufsz, 0, in.readLine(out));
-      assertEquals("total bytes, bufsz: "+bufsz, c, STRLENBYTES);
+      assertEquals(5, out.getLength(), "line5 length, bufsz: "+bufsz);
+      assertEquals(0, in.readLine(out), "end of file, bufsz: " +bufsz);
+      assertEquals(c, STRLENBYTES, "total bytes, bufsz: "+bufsz);
     }
   }
 
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testMRMaxLine() throws Exception {
     final int MAXPOS = 1024 * 1024;
     final int MAXLINE = 10 * 1024;
@@ -436,7 +446,7 @@ public class TestTextInputFormat {
       }
       @Override
       public int read(byte[] b) {
-        assertTrue("Read too many bytes from the stream", position < MAXPOSBUF);
+        assertTrue(position < MAXPOSBUF, "Read too many bytes from the stream");
         Arrays.fill(b, (byte) 0);
         position += b.length;
         return b.length;
@@ -454,10 +464,10 @@ public class TestTextInputFormat {
     conf.setInt("io.file.buffer.size", BUF); // used by LRR
     // test another constructor 
      LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
-    assertFalse("Read a line from null", lrr.next(key, val));
+    assertFalse(lrr.next(key, val), "Read a line from null");
     infNull.reset();
      lrr = new LineRecordReader(infNull, 0L, MAXLINE, MAXPOS);
-    assertFalse("Read a line from null", lrr.next(key, val));
+    assertFalse(lrr.next(key, val), "Read a line from null");
     
     
   }
@@ -496,7 +506,8 @@ public class TestTextInputFormat {
   /**
    * Test using the gzip codec for reading
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testGzip() throws IOException {
     JobConf job = new JobConf(defaultConf);
     CompressionCodec gzip = new GzipCodec();
@@ -510,27 +521,27 @@ public class TestTextInputFormat {
     TextInputFormat format = new TextInputFormat();
     format.configure(job);
     InputSplit[] splits = format.getSplits(job, 100);
-    assertEquals("compressed splits == 2", 2, splits.length);
+    assertEquals(2, splits.length, "compressed splits == 2");
     FileSplit tmp = (FileSplit) splits[0];
     if (tmp.getPath().getName().equals("part2.txt.gz")) {
       splits[0] = splits[1];
       splits[1] = tmp;
     }
     List<Text> results = readSplit(format, splits[0], job);
-    assertEquals("splits[0] length", 6, results.size());
-    assertEquals("splits[0][5]", " dog", results.get(5).toString());
+    assertEquals(6, results.size(), "splits[0] length");
+    assertEquals(" dog", results.get(5).toString(), "splits[0][5]");
     results = readSplit(format, splits[1], job);
-    assertEquals("splits[1] length", 2, results.size());
-    assertEquals("splits[1][0]", "this is a test", 
-                 results.get(0).toString());    
-    assertEquals("splits[1][1]", "of gzip", 
-                 results.get(1).toString());    
+    assertEquals(2, results.size(), "splits[1] length");
+    assertEquals("this is a test", results.get(0).toString(), "splits[1][0]");
+    assertEquals("of gzip",
+        results.get(1).toString(), "splits[1][1]");
   }
 
   /**
    * Test using the gzip codec and an empty input file
    */
-  @Test (timeout=5000)
+  @Test
+  @Timeout(value = 5)
   public void testGzipEmpty() throws IOException {
     JobConf job = new JobConf(defaultConf);
     CompressionCodec gzip = new GzipCodec();
@@ -541,10 +552,10 @@ public class TestTextInputFormat {
     TextInputFormat format = new TextInputFormat();
     format.configure(job);
     InputSplit[] splits = format.getSplits(job, 100);
-    assertEquals("Compressed files of length 0 are not returned from FileInputFormat.getSplits().",
-                 1, splits.length);
+    assertEquals(1, splits.length,
+        "Compressed files of length 0 are not returned from FileInputFormat.getSplits().");
     List<Text> results = readSplit(format, splits[0], job);
-    assertEquals("Compressed empty file length == 0", 0, results.size());
+    assertEquals(0, results.size(), "Compressed empty file length == 0");
   }
   
   private static String unquote(String in) {
@@ -576,7 +587,7 @@ public class TestTextInputFormat {
    * @param args
    * @throws Exception
    */
-  public static void main(String[] args) throws Exception {
+  /*public static void main(String[] args) throws Exception {
     for(String arg: args) {
       System.out.println("Working on " + arg);
       LineReader reader = makeStream(unquote(arg));
@@ -588,5 +599,5 @@ public class TestTextInputFormat {
       }
       reader.close();
     }
-  }
+  }*/
 }

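One detail worth calling out in the TestTextInputFormat hunks: the timeout unit changes. JUnit 4's @Test(timeout=...) is in milliseconds, while JUnit 5's @Timeout defaults to seconds, so timeout=500000 correctly becomes @Timeout(value = 500). An illustrative sketch of the two equivalent JUnit 5 spellings (test names are made up):

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutExampleTest {  // hypothetical class, for illustration only
  @Test
  @Timeout(value = 500)  // default unit is SECONDS, i.e. 500000 ms in JUnit 4 terms
  void defaultUnitIsSeconds() { }

  @Test
  @Timeout(value = 500_000, unit = TimeUnit.MILLISECONDS)  // same duration, explicit unit
  void explicitMillisecondsUnit() { }
}
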
+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java

@@ -22,10 +22,10 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java

@@ -24,7 +24,7 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -35,8 +35,8 @@ import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestUserDefinedCounters {
   private static String TEST_ROOT_DIR =

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java

@@ -20,8 +20,9 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestUtils {
   private static final Path[] LOG_PATHS = new Path[] {

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java

@@ -25,13 +25,13 @@ import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 import org.apache.hadoop.util.GenericsUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestWritableJobConf {
 

+ 58 - 48
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java

@@ -20,11 +20,11 @@ package org.apache.hadoop.mapred;
 
 import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.doReturn;
@@ -117,11 +117,11 @@ import org.apache.log4j.Level;
 import org.apache.log4j.SimpleLayout;
 import org.apache.log4j.WriterAppender;
 import org.apache.log4j.spi.LoggingEvent;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 import org.slf4j.Logger;
@@ -179,12 +179,12 @@ public class TestYARNRunner {
   private  ClientServiceDelegate clientDelegate;
   private static final String failString = "Rejected job";
 
-  @BeforeClass
+  @BeforeAll
   public static void setupBeforeClass() {
     ResourceUtils.resetResourceTypes(new Configuration());
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     resourceMgrDelegate = mock(ResourceMgrDelegate.class);
     conf = new YarnConfiguration();
@@ -213,13 +213,14 @@ public class TestYARNRunner {
     testWorkDir.mkdirs();
   }
 
-  @After
+  @AfterEach
   public void cleanup() {
     FileUtil.fullyDelete(testWorkDir);
     ResourceUtils.resetResourceTypes(new Configuration());
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testJobKill() throws Exception {
     clientDelegate = mock(ClientServiceDelegate.class);
     when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
@@ -255,7 +256,8 @@ public class TestYARNRunner {
     verify(clientDelegate).killJob(jobId);
   }
 
-  @Test(timeout=60000)
+  @Test
+  @Timeout(value = 60)
   public void testJobKillTimeout() throws Exception {
     long timeToWaitBeforeHardKill =
         10000 + MRJobConfig.DEFAULT_MR_AM_HARD_KILL_TIMEOUT_MS;
@@ -276,12 +278,14 @@ public class TestYARNRunner {
             State.RUNNING, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
     long startTimeMillis = System.currentTimeMillis();
     yarnRunner.killJob(jobId);
-    assertTrue("killJob should have waited at least " + timeToWaitBeforeHardKill
-        + " ms.", System.currentTimeMillis() - startTimeMillis
-                  >= timeToWaitBeforeHardKill);
+    assertTrue(System.currentTimeMillis() - startTimeMillis
+        >= timeToWaitBeforeHardKill,
+        "killJob should have waited at least " + timeToWaitBeforeHardKill
+        + " ms.");
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testJobSubmissionFailure() throws Exception {
     when(resourceMgrDelegate.submitApplication(any(ApplicationSubmissionContext.class))).
     thenReturn(appId);
@@ -303,7 +307,8 @@ public class TestYARNRunner {
     }
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testResourceMgrDelegate() throws Exception {
     /* we not want a mock of resource mgr delegate */
     final ApplicationClientProtocol clientRMProtocol = mock(ApplicationClientProtocol.class);
@@ -371,7 +376,8 @@ public class TestYARNRunner {
     verify(clientRMProtocol).getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class));
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testGetHSDelegationToken() throws Exception {
     try {
       Configuration conf = new Configuration();
@@ -452,7 +458,8 @@ public class TestYARNRunner {
     }
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testHistoryServerToken() throws Exception {
     //Set the master principal in the config
     conf.set(YarnConfiguration.RM_PRINCIPAL,"foo@LOCAL");
@@ -495,7 +502,8 @@ public class TestYARNRunner {
         });
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testAMAdminCommandOpts() throws Exception {
     JobConf jobConf = new JobConf();
     
@@ -519,8 +527,8 @@ public class TestYARNRunner {
 
     for(String command : commands) {
       if(command != null) {
-        assertFalse("Profiler should be disabled by default",
-            command.contains(PROFILE_PARAMS));
+        assertFalse(command.contains(PROFILE_PARAMS),
+            "Profiler should be disabled by default");
         adminPos = command.indexOf("-Djava.net.preferIPv4Stack=true");
         if(adminPos >= 0)
           adminIndex = index;
@@ -536,20 +544,21 @@ public class TestYARNRunner {
     }
 
     // Check java.io.tmpdir opts are set in the commands
-    assertTrue("java.io.tmpdir is not set for AM", tmpDirPos > 0);
+    assertTrue(tmpDirPos > 0, "java.io.tmpdir is not set for AM");
 
     // Check both admin java opts and user java opts are in the commands
-    assertTrue("AM admin command opts not in the commands.", adminPos > 0);
-    assertTrue("AM user command opts not in the commands.", userPos > 0);
+    assertTrue(adminPos > 0, "AM admin command opts not in the commands.");
+    assertTrue(userPos > 0, "AM user command opts not in the commands.");
     
     // Check the admin java opts is before user java opts in the commands
     if(adminIndex == userIndex) {
-      assertTrue("AM admin command opts is after user command opts.", adminPos < userPos);
+      assertTrue(adminPos < userPos, "AM admin command opts is after user command opts.");
     } else {
-      assertTrue("AM admin command opts is after user command opts.", adminIndex < userIndex);
+      assertTrue(adminIndex < userIndex, "AM admin command opts is after user command opts.");
     }
   }
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testWarnCommandOpts() throws Exception {
     org.apache.log4j.Logger logger =
         org.apache.log4j.Logger.getLogger(YARNRunner.class);
@@ -583,7 +592,8 @@ public class TestYARNRunner {
         "using yarn.app.mapreduce.am.env config settings."));
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testAMProfiler() throws Exception {
     JobConf jobConf = new JobConf();
 
@@ -837,7 +847,7 @@ public class TestYARNRunner {
     ContainerLaunchContext clc = appSubCtx.getAMContainerSpec();
     Map<String, String> env = clc.getEnvironment();
     String libPath = env.get(pathKey);
-    assertNotNull(pathKey + " not set", libPath);
+    assertNotNull(libPath, pathKey + " not set");
     String cps = jobConf.getBoolean(
         MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM,
         MRConfig.DEFAULT_MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM)
@@ -852,12 +862,12 @@ public class TestYARNRunner {
           MRJobConfig.DEFAULT_MR_AM_ADMIN_USER_ENV.substring(
               pathKey.length() + 1);
     }
-    assertEquals("Bad AM " + pathKey + " setting", expectedLibPath, libPath);
+    assertEquals(expectedLibPath, libPath, "Bad AM " + pathKey + " setting");
 
     // make sure SHELL is set
     String shell = env.get(Environment.SHELL.name());
-    assertNotNull("SHELL not set", shell);
-    assertEquals("Bad SHELL setting", USER_SHELL, shell);
+    assertNotNull(shell, "SHELL not set");
+    assertEquals(USER_SHELL, shell, "Bad SHELL setting");
   }
 
   @Test
@@ -929,13 +939,13 @@ public class TestYARNRunner {
     Configuration confSent = BuilderUtils.parseTokensConf(submissionContext);
 
     // configs that match regex should be included
-    Assert.assertEquals("123.0.0.1",
+    assertEquals("123.0.0.1",
         confSent.get("dfs.namenode.rpc-address.mycluster2.nn1"));
-    Assert.assertEquals("123.0.0.2",
+    assertEquals("123.0.0.2",
         confSent.get("dfs.namenode.rpc-address.mycluster2.nn2"));
 
     // configs that aren't matching regex should not be included
-    Assert.assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent
+    assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent
         .get("hadoop.tmp.dir").equals("testconfdir"));
     UserGroupInformation.reset();
   }
@@ -957,15 +967,15 @@ public class TestYARNRunner {
     List<ResourceRequest> resourceRequests =
         submissionContext.getAMContainerResourceRequests();
 
-    Assert.assertEquals(1, resourceRequests.size());
+    assertEquals(1, resourceRequests.size());
     ResourceRequest resourceRequest = resourceRequests.get(0);
 
     ResourceInformation resourceInformation = resourceRequest.getCapability()
         .getResourceInformation(CUSTOM_RESOURCE_NAME);
-    Assert.assertEquals("Expecting the default unit (G)",
-        "G", resourceInformation.getUnits());
-    Assert.assertEquals(5L, resourceInformation.getValue());
-    Assert.assertEquals(3, resourceRequest.getCapability().getVirtualCores());
+    assertEquals("G", resourceInformation.getUnits(),
+        "Expecting the default unit (G)");
+    assertEquals(5L, resourceInformation.getValue());
+    assertEquals(3, resourceRequest.getCapability().getVirtualCores());
   }
 
   @Test
@@ -983,11 +993,11 @@ public class TestYARNRunner {
       List<ResourceRequest> resourceRequests =
           submissionContext.getAMContainerResourceRequests();
 
-      Assert.assertEquals(1, resourceRequests.size());
+      assertEquals(1, resourceRequests.size());
       ResourceRequest resourceRequest = resourceRequests.get(0);
 
       long memorySize = resourceRequest.getCapability().getMemorySize();
-      Assert.assertEquals(3072, memorySize);
+      assertEquals(3072, memorySize);
     }
   }
 
@@ -1012,11 +1022,11 @@ public class TestYARNRunner {
         List<ResourceRequest> resourceRequests =
             submissionContext.getAMContainerResourceRequests();
 
-        Assert.assertEquals(1, resourceRequests.size());
+        assertEquals(1, resourceRequests.size());
         ResourceRequest resourceRequest = resourceRequests.get(0);
 
         long memorySize = resourceRequest.getCapability().getMemorySize();
-        Assert.assertEquals(3072, memorySize);
+        assertEquals(3072, memorySize);
         assertTrue(testAppender.getLogEvents().stream().anyMatch(
             e -> e.getLevel() == Level.WARN && ("Configuration " +
                 "yarn.app.mapreduce.am.resource." + memoryName + "=3Gi is " +

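The TestYARNRunner hunk collects the lifecycle renames used throughout: @BeforeClass becomes @BeforeAll, @Before becomes @BeforeEach, and @After becomes @AfterEach, with the per-class vs per-method semantics unchanged. A sketch of the JUnit 5 shape (names are illustrative); note that @BeforeAll/@AfterAll methods must be static under the default test-instance lifecycle:

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class LifecycleExampleTest {  // hypothetical class, for illustration only
  @BeforeAll
  static void setUpOnce() { }  // was @BeforeClass; runs once, must be static by default

  @BeforeEach
  void setUp() { }  // was @Before; runs before every @Test

  @AfterEach
  void tearDown() { }  // was @After; runs after every @Test

  @Test
  void example() { }
}
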
+ 22 - 15
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java

@@ -24,15 +24,18 @@ import static org.mockito.Mockito.when;
 
 import java.util.ArrayList;
 
-import org.junit.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * This class performs unit test for Job/JobControl classes.
@@ -198,14 +201,15 @@ public class TestJobControl {
   }
 
   @SuppressWarnings("deprecation")
-  @Test(timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testJobState() throws Exception {
     Job job_1 = getCopyJob();
     JobControl jc = new JobControl("Test");
     jc.addJob(job_1);
-    Assert.assertEquals(Job.WAITING, job_1.getState());
+    assertEquals(Job.WAITING, job_1.getState());
     job_1.setState(Job.SUCCESS);
-    Assert.assertEquals(Job.WAITING, job_1.getState());
+    assertEquals(Job.WAITING, job_1.getState());
 
     org.apache.hadoop.mapreduce.Job mockjob =
         mock(org.apache.hadoop.mapreduce.Job.class);
@@ -213,20 +217,21 @@ public class TestJobControl {
         new org.apache.hadoop.mapreduce.JobID("test", 0);
     when(mockjob.getJobID()).thenReturn(jid);
     job_1.setJob(mockjob);
-    Assert.assertEquals("job_test_0000", job_1.getMapredJobID());
+    assertEquals("job_test_0000", job_1.getMapredJobID());
     job_1.setMapredJobID("job_test_0001");
-    Assert.assertEquals("job_test_0000", job_1.getMapredJobID());
+    assertEquals("job_test_0000", job_1.getMapredJobID());
     jc.stop();
   }
 
-  @Test(timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testAddingDependingJob() throws Exception {
     Job job_1 = getCopyJob();
     ArrayList<Job> dependingJobs = new ArrayList<Job>();
     JobControl jc = new JobControl("Test");
     jc.addJob(job_1);
-    Assert.assertEquals(Job.WAITING, job_1.getState());
-    Assert.assertTrue(job_1.addDependingJob(new Job(job_1.getJobConf(),
+    assertEquals(Job.WAITING, job_1.getState());
+    assertTrue(job_1.addDependingJob(new Job(job_1.getJobConf(),
       dependingJobs)));
   }
 
@@ -253,23 +258,25 @@ public class TestJobControl {
     return job_1;
   }
   
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testJobControl() throws Exception {
     doJobControlTest();
   }
   
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testGetAssignedJobId() throws Exception {
     JobConf jc = new JobConf();
     Job j = new Job(jc);
     //Just make sure no exception is thrown
-    Assert.assertNull(j.getAssignedJobID());
+    assertNull(j.getAssignedJobID());
     org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class);
     org.apache.hadoop.mapreduce.JobID jid = new org.apache.hadoop.mapreduce.JobID("test",0);
     when(mockjob.getJobID()).thenReturn(jid);
     j.setJob(mockjob);
     JobID expected = new JobID("test",0);
-    Assert.assertEquals(expected, j.getAssignedJobID());
+    assertEquals(expected, j.getAssignedJobID());
     verify(mockjob).getJobID();
   }
   

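TestJobControl also drops the qualified Assert.assertEquals(...) calls: JUnit 5 replaces the org.junit.Assert utility class with org.junit.jupiter.api.Assertions, and the commit standardizes on static imports so the call sites read unchanged. Both JUnit 5 spellings are equivalent, as in this illustrative sketch:

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

class AssertionsImportExampleTest {  // hypothetical class, for illustration only
  @Test
  void qualifiedAndStaticImportForms() {
    Assertions.assertEquals(4, 2 + 2);  // qualified, analogous to the old Assert.assertEquals
    assertEquals(4, 2 + 2);             // static import, the style used in this commit
  }
}
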
+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java

@@ -25,11 +25,11 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobConf;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * HadoopTestCase that tests the local job runner.
@@ -132,7 +132,7 @@ public class TestLocalJobControl extends HadoopTestCase {
       }
     }
 
-    assertEquals("Some jobs failed", 0, theControl.getFailedJobs().size());
+    assertEquals(0, theControl.getFailedJobs().size(), "Some jobs failed");
     theControl.stop();
   }
 

+ 20 - 19
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java

@@ -49,23 +49,24 @@ import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestDatamerge {
 
   private static MiniDFSCluster cluster = null;
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     Configuration conf = new Configuration();
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
   }
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     if (cluster != null) {
       cluster.shutdown();
@@ -131,7 +132,7 @@ public class TestDatamerge {
     public void close() { }
     public void configure(JobConf job) {
       srcs = job.getInt("testdatamerge.sources", 0);
-      assertTrue("Invalid src count: " + srcs, srcs > 0);
+      assertTrue(srcs > 0, "Invalid src count: " + srcs);
     }
     public abstract void map(IntWritable key, V val,
         OutputCollector<IntWritable, IntWritable> out, Reporter reporter)
@@ -143,7 +144,7 @@ public class TestDatamerge {
       while (values.hasNext()) {
         seen += values.next().get();
       }
-      assertTrue("Bad count for " + key.get(), verify(key.get(), seen));
+      assertTrue(verify(key.get(), seen), "Bad count for " + key.get());
     }
     public abstract boolean verify(int key, int occ);
   }
@@ -155,10 +156,10 @@ public class TestDatamerge {
         throws IOException {
       int k = key.get();
       final String kvstr = "Unexpected tuple: " + stringify(key, val);
-      assertTrue(kvstr, 0 == k % (srcs * srcs));
+      assertEquals(0, k % (srcs * srcs), kvstr);
       for (int i = 0; i < val.size(); ++i) {
         final int vali = ((IntWritable)val.get(i)).get();
-        assertTrue(kvstr, (vali - i) * srcs == 10 * k);
+        assertEquals((vali - i) * srcs, 10 * k, kvstr);
       }
       out.collect(key, one);
     }
@@ -177,18 +178,18 @@ public class TestDatamerge {
       final String kvstr = "Unexpected tuple: " + stringify(key, val);
       if (0 == k % (srcs * srcs)) {
         for (int i = 0; i < val.size(); ++i) {
-          assertTrue(kvstr, val.get(i) instanceof IntWritable);
+          assertTrue(val.get(i) instanceof IntWritable, kvstr);
           final int vali = ((IntWritable)val.get(i)).get();
-          assertTrue(kvstr, (vali - i) * srcs == 10 * k);
+          assertEquals((vali - i) * srcs, 10 * k, kvstr);
         }
       } else {
         for (int i = 0; i < val.size(); ++i) {
           if (i == k % srcs) {
-            assertTrue(kvstr, val.get(i) instanceof IntWritable);
+            assertTrue(val.get(i) instanceof IntWritable, kvstr);
             final int vali = ((IntWritable)val.get(i)).get();
-            assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
+            assertEquals(srcs * (vali - i), 10 * (k - i), kvstr);
           } else {
-            assertTrue(kvstr, !val.has(i));
+            assertFalse(val.has(i), kvstr);
           }
         }
       }
@@ -210,10 +211,10 @@ public class TestDatamerge {
       final int vali = val.get();
       final String kvstr = "Unexpected tuple: " + stringify(key, val);
       if (0 == k % (srcs * srcs)) {
-        assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1);
+        assertEquals(vali, k * 10 / srcs + srcs - 1, kvstr);
       } else {
         final int i = k % srcs;
-        assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
+        assertEquals(srcs * (vali - i), 10 * (k - i), kvstr);
       }
       out.collect(key, one);
     }

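Beyond reordering the message argument, the TestDatamerge hunks upgrade assertTrue(msg, a == b) to assertEquals(a, b, msg), which on failure reports both operands instead of a bare "expected true". One caveat the migrated lines rely on: assertEquals treats its first argument as the expected value, so the operands must be ordered accordingly. An illustrative sketch with made-up values:

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

class ComparisonUpgradeExampleTest {  // hypothetical class, for illustration only
  @Test
  void assertEqualsReportsBothOperands() {
    int k = 12;
    int srcs = 2;
    String kvstr = "Unexpected tuple: k=" + k;
    // Carried over from JUnit 4 this would read: assertTrue(kvstr, 0 == k % (srcs * srcs));
    // The migrated form fails with "expected: <0> but was: <...>" instead:
    assertEquals(0, k % (srcs * srcs), kvstr);
  }
}
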
+ 25 - 21
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java

@@ -34,10 +34,10 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 public class TestTupleWritable {
 
@@ -97,7 +97,7 @@ public class TestTupleWritable {
         i = verifIter(writs, ((TupleWritable)w), i);
         continue;
       }
-      assertTrue("Bad value", w.equals(writs[i++]));
+      assertEquals(w, writs[i++], "Bad value");
     }
     return i;
   }
@@ -140,7 +140,7 @@ public class TestTupleWritable {
       new IntWritable(r.nextInt())
     };
     TupleWritable sTuple = makeTuple(writs);
-    assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
+    assertEquals(writs.length, verifIter(writs, sTuple, 0), "Bad count");
   }
 
   @Test
@@ -164,7 +164,7 @@ public class TestTupleWritable {
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     TupleWritable dTuple = new TupleWritable();
     dTuple.readFields(new DataInputStream(in));
-    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
+    assertEquals(sTuple, dTuple, "Failed to write/read tuple");
   }
 
   @Test
@@ -183,8 +183,8 @@ public class TestTupleWritable {
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     TupleWritable dTuple = new TupleWritable();
     dTuple.readFields(new DataInputStream(in));
-    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
-    assertEquals("All tuple data has not been read from the stream",-1,in.read());
+    assertEquals(sTuple, dTuple, "Failed to write/read tuple");
+    assertEquals(-1, in.read(), "All tuple data has not been read from the stream");
   }
 
   @Test
@@ -201,8 +201,8 @@ public class TestTupleWritable {
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     TupleWritable dTuple = new TupleWritable();
     dTuple.readFields(new DataInputStream(in));
-    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
-    assertEquals("All tuple data has not been read from the stream",-1,in.read());
+    assertEquals(sTuple, dTuple, "Failed to write/read tuple");
+    assertEquals(-1, in.read(), "All tuple data has not been read from the stream");
   }
   
   /**
@@ -225,8 +225,8 @@ public class TestTupleWritable {
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     TupleWritable dTuple = new TupleWritable();
     dTuple.readFields(new DataInputStream(in));
-    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
-    assertEquals("All tuple data has not been read from the stream",-1,in.read());
+    assertEquals(sTuple, dTuple, "Failed to write/read tuple");
+    assertEquals(-1, in.read(), "All tuple data has not been read from the stream");
   }
   @Test
   public void testWideTuple() throws Exception {
@@ -244,7 +244,7 @@ public class TestTupleWritable {
         assertTrue(has);
       }
       else {
-        assertFalse("Tuple position is incorrectly labelled as set: " + pos, has);
+        assertFalse(has, "Tuple position is incorrectly labelled as set: " + pos);
       }
     }
   }
@@ -264,7 +264,7 @@ public class TestTupleWritable {
         assertTrue(has);
       }
       else {
-        assertFalse("Tuple position is incorrectly labelled as set: " + pos, has);
+        assertFalse(has, "Tuple position is incorrectly labelled as set: " + pos);
       }
     }
   }
@@ -288,7 +288,7 @@ public class TestTupleWritable {
         assertTrue(has);
       }
       else {
-        assertFalse("Tuple position is incorrectly labelled as set: " + pos, has);
+        assertFalse(has, "Tuple position is incorrectly labelled as set: " + pos);
       }
     }
   }
@@ -311,8 +311,10 @@ public class TestTupleWritable {
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     TupleWritable dTuple = new TupleWritable();
     dTuple.readFields(new DataInputStream(in));
-    assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
-    assertEquals("All tuple data has not been read from the stream",-1,in.read());
+    assertTrue(oldTuple.isCompatible(dTuple),
+        "Tuple writable is unable to read pre-0.21 versions of TupleWritable");
+    assertEquals(-1, in.read(),
+        "All tuple data has not been read from the stream");
   }
   @Test
   public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
@@ -324,8 +326,10 @@ public class TestTupleWritable {
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     TupleWritable dTuple = new TupleWritable();
     dTuple.readFields(new DataInputStream(in));
-    assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
-    assertEquals("All tuple data has not been read from the stream",-1,in.read());
+    assertTrue(oldTuple.isCompatible(dTuple),
+        "Tuple writable is unable to read pre-0.21 versions of TupleWritable");
+    assertEquals(-1, in.read(),
+        "All tuple data has not been read from the stream");
   }
   
   /**
@@ -335,7 +339,7 @@ public class TestTupleWritable {
   private static class PreVersion21TupleWritable {
     
     private Writable[] values;
-    private long written = 0L;
+    private long written;
 
     private PreVersion21TupleWritable(Writable[] vals) {
       written = 0L;

+ 5 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java

@@ -33,8 +33,8 @@ import org.apache.hadoop.mapred.JobConfigurable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestWrappedRecordReaderClassloader {
   /**
@@ -123,9 +123,9 @@ public class TestWrappedRecordReaderClassloader {
 
     @SuppressWarnings("unchecked")
     public RR_ClassLoaderChecker(JobConf job) {
-      assertTrue("The class loader has not been inherited from "
-          + CompositeRecordReader.class.getSimpleName(),
-          job.getClassLoader() instanceof Fake_ClassLoader);
+      assertTrue(job.getClassLoader() instanceof Fake_ClassLoader,
+          "The class loader has not been inherited from "
+          + CompositeRecordReader.class.getSimpleName());
 
       keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
           NullWritable.class, WritableComparable.class);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.mapred.lib;
 
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reducer;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
 

+ 6 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java

@@ -33,16 +33,16 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Iterator;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotSame;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class TestChainMapReduce extends HadoopTestCase {
 
@@ -108,7 +108,7 @@ public class TestChainMapReduce extends HadoopTestCase {
 
     fs.delete(outDir, true);
     if (!fs.mkdirs(inDir)) {
-      throw new IOException("Mkdirs failed to create " + inDir.toString());
+      throw new IOException("Mkdirs failed to create " + inDir);
     }
 
     DataOutputStream file = fs.create(new Path(inDir, "part-0"));

+ 4 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java

@@ -30,9 +30,10 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestDelegatingInputFormat {
   @Test

+ 5 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java

@@ -34,10 +34,10 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.Utils;
-import org.junit.After;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -133,7 +133,7 @@ public class TestKeyFieldBasedComparator extends HadoopTestCase {
     }
   }
 
-  @After
+  @AfterEach
   public void cleanup() {
     FileUtil.fullyDelete(TEST_DIR);
   }

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java

@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.mapred.lib;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestKeyFieldBasedPartitioner {
 
@@ -35,8 +35,8 @@ public class TestKeyFieldBasedPartitioner {
     JobConf conf = new JobConf();
     conf.setInt("num.key.fields.for.partition", 10);
     kfbp.configure(conf);
-    assertEquals("Empty key should map to 0th partition", 
-                 0, kfbp.getPartition(new Text(), new Text(), 10));
+    assertEquals(0, kfbp.getPartition(new Text(), new Text(), 10),
+        "Empty key should map to 0th partition");
   }
 
   @Test

+ 11 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java

@@ -24,8 +24,8 @@ import java.util.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestLineInputFormat {
   private static int MAX_LENGTH = 200;
@@ -84,21 +84,21 @@ public class TestLineInputFormat {
     InputSplit[] splits = format.getSplits(job, ignoredNumSplits);
 
     // check all splits except last one
-    int count = 0;
+    int count;
     for (int j = 0; j < splits.length -1; j++) {
-      assertEquals("There are no split locations", 0,
-                   splits[j].getLocations().length);
+      assertEquals(0, splits[j].getLocations().length,
+          "There are no split locations");
       RecordReader<LongWritable, Text> reader =
         format.getRecordReader(splits[j], job, voidReporter);
       Class readerClass = reader.getClass();
-      assertEquals("reader class is LineRecordReader.",
-                   LineRecordReader.class, readerClass);        
+      assertEquals(LineRecordReader.class, readerClass,
+          "reader class is LineRecordReader.");
       LongWritable key = reader.createKey();
       Class keyClass = key.getClass();
-      assertEquals("Key class is LongWritable.", LongWritable.class, keyClass);
+      assertEquals(LongWritable.class, keyClass, "Key class is LongWritable.");
       Text value = reader.createValue();
       Class valueClass = value.getClass();
-      assertEquals("Value class is Text.", Text.class, valueClass);
+      assertEquals(Text.class, valueClass, "Value class is Text.");
          
       try {
         count = 0;
@@ -108,8 +108,8 @@ public class TestLineInputFormat {
       } finally {
         reader.close();
       }
-      assertEquals("number of lines in split is " + expectedN ,
-                   expectedN, count);
+      assertEquals(expectedN, count,
+          "number of lines in split is " + expectedN);
     }
   }
   

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java

@@ -25,12 +25,12 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.util.Map;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * @see TestDelegatingInputFormat

Some files are not shown in this diff because too many files have changed.