Browse the code

HADOOP-19441. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-streaming. (#7554)

Co-authored-by: Chris Nauroth <cnauroth@apache.org>
Co-authored-by: Hualong Zhang <hualong.z@hotmail.com>
Reviewed-by: Chris Nauroth <cnauroth@apache.org>
Reviewed-by: Hualong Zhang <hualong.z@hotmail.com>
Signed-off-by: Shilun Fan <slfan1989@apache.org>
slfan1989, 3 weeks ago
parent
commit
3d2f4d669e
32 files changed, 159 insertions and 146 deletions
  1. 11 11
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestAutoInputFormat.java
  2. 4 4
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestClassWithNoPackage.java
  3. 6 7
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestDumpTypedBytes.java
  4. 4 4
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
  5. 6 6
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestLoadTypedBytes.java
  6. 9 8
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMRFramework.java
  7. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java
  8. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestRawBytesStreaming.java
  9. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamAggregate.java
  10. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java
  11. 17 10
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamJob.java
  12. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java
  13. 3 3
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlMultipleRecords.java
  14. 8 7
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java
  15. 6 6
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBackground.java
  16. 7 7
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
  17. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingCombiner.java
  18. 6 5
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingCounters.java
  19. 7 6
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingExitStatus.java
  20. 3 3
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingFailure.java
  21. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java
  22. 3 3
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingOutputKeyValueTypes.java
  23. 1 1
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingOutputOnlyKeys.java
  24. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java
  25. 7 6
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingStatus.java
  26. 3 3
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingStderr.java
  27. 5 3
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
  28. 6 6
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
  29. 5 5
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java
  30. 7 7
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/io/TestKeyOnlyTextOutputReader.java
  31. 7 7
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
  32. 2 2
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java

+ 11 - 11
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestAutoInputFormat.java

@@ -34,10 +34,10 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.streaming.AutoInputFormat;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestAutoInputFormat {
 
@@ -93,15 +93,15 @@ public class TestAutoInputFormat {
       try {
         while (reader.next(key, value)) {
           if (key instanceof LongWritable) {
-            assertEquals("Wrong value class.", Text.class, value.getClass());
-            assertTrue("Invalid value", Integer.parseInt(((Text) value)
-              .toString()) % 10 == 0);
+            assertEquals(Text.class, value.getClass(), "Wrong value class.");
+            assertTrue(Integer.parseInt(((Text) value)
+                .toString()) % 10 == 0, "Invalid value");
           } else {
-            assertEquals("Wrong key class.", IntWritable.class, key.getClass());
-            assertEquals("Wrong value class.", LongWritable.class, value
-              .getClass());
-            assertTrue("Invalid key.", ((IntWritable) key).get() % 11 == 0);
-            assertTrue("Invalid value.", ((LongWritable) value).get() % 12 == 0);
+            assertEquals(IntWritable.class, key.getClass(), "Wrong key class.");
+            assertEquals(LongWritable.class, value
+                .getClass(), "Wrong value class.");
+            assertTrue(((IntWritable) key).get() % 11 == 0, "Invalid key.");
+            assertTrue(((LongWritable) value).get() % 12 == 0, "Invalid value.");
           }
         }
       } finally {

+ 4 - 4
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestClassWithNoPackage.java

@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.streaming;
 
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
 import java.net.URL;
 import java.net.URLClassLoader;
-import java.net.MalformedURLException;
 
 import org.apache.hadoop.util.JarFinder;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -46,7 +46,7 @@ public class TestClassWithNoPackage
     // Get class with no package name.
     String defaultPackage = this.getClass().getPackage().getName();
     Class c = StreamUtil.goodClassOrNull(conf, NAME, defaultPackage);
-    assertNotNull("Class " + NAME + " not found!", c);
+    assertNotNull(c, "Class " + NAME + " not found!");
   }
   public static void main(String[]args) throws Exception
   {

+ 6 - 7
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestDumpTypedBytes.java

@@ -28,11 +28,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.streaming.DumpTypedBytes;
 import org.apache.hadoop.typedbytes.TypedBytesInput;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestDumpTypedBytes {
 
@@ -65,7 +65,7 @@ public class TestDumpTypedBytes {
       String[] args = new String[1];
       args[0] = "/typedbytestest";
       int ret = dumptb.run(args);
-      assertEquals("Return value != 0.", 0, ret);
+      assertEquals(0, ret, "Return value != 0.");
 
       ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
       TypedBytesInput tbinput = new TypedBytesInput(new DataInputStream(in));
@@ -75,12 +75,11 @@ public class TestDumpTypedBytes {
         assertEquals(Long.class, key.getClass()); // offset
         Object value = tbinput.read();
         assertEquals(String.class, value.getClass());
-        assertTrue("Invalid output.",
-          Integer.parseInt(value.toString()) % 10 == 0);
+        assertTrue(Integer.parseInt(value.toString()) % 10 == 0, "Invalid output.");
         counter++;
         key = tbinput.read();
       }
-      assertEquals("Wrong number of outputs.", 100, counter);
+      assertEquals(100, counter, "Wrong number of outputs.");
     } finally {
       try {
         fs.close();

+ 4 - 4
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java

@@ -30,8 +30,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.util.Shell;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * This class tests that the '-file' argument to streaming results
@@ -65,7 +65,7 @@ public class TestFileArgs extends TestStreaming
     setTestDir(new File("/tmp/TestFileArgs"));
   }
 
-  @Before
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     // Set up side file
@@ -79,7 +79,7 @@ public class TestFileArgs extends TestStreaming
     input = "";
   }
 
-  @After
+  @AfterEach
   @Override
   public void tearDown() {
     if (mr != null) {

+ 6 - 6
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestLoadTypedBytes.java

@@ -31,8 +31,9 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.typedbytes.TypedBytesOutput;
 import org.apache.hadoop.typedbytes.TypedBytesWritable;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestLoadTypedBytes {
 
@@ -62,7 +63,7 @@ public class TestLoadTypedBytes {
       String[] args = new String[1];
       args[0] = "/typedbytestest/test.seq";
       int ret = loadtb.run(args);
-      assertEquals("Return value != 0.", 0, ret);
+      assertEquals(0, ret, "Return value != 0.");
 
       Path file = new Path(root, "test.seq");
       assertTrue(fs.exists(file));
@@ -73,11 +74,10 @@ public class TestLoadTypedBytes {
       while (reader.next(key, value)) {
         assertEquals(Long.class, key.getValue().getClass());
         assertEquals(String.class, value.getValue().getClass());
-        assertTrue("Invalid record.",
-          Integer.parseInt(value.toString()) % 10 == 0);
+        assertTrue(Integer.parseInt(value.toString()) % 10 == 0, "Invalid record.");
         counter++;
       }
-      assertEquals("Wrong number of records.", 100, counter);
+      assertEquals(100, counter, "Wrong number of records.");
     } finally {
       try {
         fs.close();

+ 9 - 8
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMRFramework.java

@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.streaming;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestMRFramework {
 
@@ -31,18 +32,18 @@ public class TestMRFramework {
     JobConf jobConf = new JobConf();
     jobConf.set(JTConfig.JT_IPC_ADDRESS, MRConfig.LOCAL_FRAMEWORK_NAME);
     jobConf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
-    assertFalse("Expected 'isLocal' to be false", 
-        StreamUtil.isLocalJobTracker(jobConf));
+    assertFalse(StreamUtil.isLocalJobTracker(jobConf),
+        "Expected 'isLocal' to be false");
     
     jobConf.set(JTConfig.JT_IPC_ADDRESS, MRConfig.LOCAL_FRAMEWORK_NAME);
     jobConf.set(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
-    assertFalse("Expected 'isLocal' to be false", 
-        StreamUtil.isLocalJobTracker(jobConf));
+    assertFalse(StreamUtil.isLocalJobTracker(jobConf),
+        "Expected 'isLocal' to be false");
     
     jobConf.set(JTConfig.JT_IPC_ADDRESS, "jthost:9090");
     jobConf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
-    assertTrue("Expected 'isLocal' to be true", 
-        StreamUtil.isLocalJobTracker(jobConf));
+    assertTrue(StreamUtil.isLocalJobTracker(jobConf),
+        "Expected 'isLocal' to be true");
   }
 
 }

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java

@@ -26,8 +26,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestRawBytesStreaming.java

@@ -27,8 +27,8 @@ import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestRawBytesStreaming {
 

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamAggregate.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import java.io.*;
 import java.nio.charset.StandardCharsets;
 

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java

@@ -23,8 +23,8 @@ import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * This class tests hadoopStreaming in MapReduce local mode.

+ 17 - 10
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamJob.java

@@ -25,23 +25,30 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.KeyValueTextInputFormat;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 /**
  * This class tests hadoop Streaming's StreamJob class.
  */
 public class TestStreamJob {
   
-  @Test(expected = IllegalArgumentException.class)
+  @Test
   public void testCreateJobWithExtraArgs() throws IOException {
-    ArrayList<String> dummyArgs = new ArrayList<String>();
-    dummyArgs.add("-input"); dummyArgs.add("dummy");
-    dummyArgs.add("-output"); dummyArgs.add("dummy");
-    dummyArgs.add("-mapper"); dummyArgs.add("dummy");
-    dummyArgs.add("dummy");
-    dummyArgs.add("-reducer"); dummyArgs.add("dummy");
-    StreamJob.createJob(dummyArgs.toArray(new String[] {}));
+    assertThrows(IllegalArgumentException.class, () -> {
+      ArrayList<String> dummyArgs = new ArrayList<String>();
+      dummyArgs.add("-input");
+      dummyArgs.add("dummy");
+      dummyArgs.add("-output");
+      dummyArgs.add("dummy");
+      dummyArgs.add("-mapper");
+      dummyArgs.add("dummy");
+      dummyArgs.add("dummy");
+      dummyArgs.add("-reducer");
+      dummyArgs.add("dummy");
+      StreamJob.createJob(dummyArgs.toArray(new String[] {}));
+    });
   }
   
   @Test

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java

@@ -22,8 +22,8 @@ import java.io.*;
 import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
 
-import static org.junit.Assert.*;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 /**
  * This class tests hadoopStreaming in MapReduce local mode.

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlMultipleRecords.java

@@ -26,8 +26,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * Tests if StreamXmlRecordReader will read the next record, _after_ the
@@ -72,7 +72,7 @@ public class TestStreamXmlMultipleRecords extends TestStreaming
   }
 
   @Override
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     super.setUp();
     // Without this closeAll() call, setting of FileSystem block size is

+ 8 - 7
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java

@@ -26,10 +26,11 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.hadoop.util.JarFinder;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -101,14 +102,14 @@ public class TestStreaming
     INPUT_FILE = new File(testDir, "input.txt");
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     UtilTest.recursiveDelete(TEST_DIR);
-    assertTrue("Creating " + TEST_DIR, TEST_DIR.mkdirs());
+    assertTrue(TEST_DIR.mkdirs(), "Creating " + TEST_DIR);
     args.clear();
   }
 
-  @After
+  @AfterEach
   public void tearDown() {
     UtilTest.recursiveDelete(TEST_DIR);
   }

+ 6 - 6
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBackground.java

@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.streaming;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * This class tests if hadoopStreaming background works fine. A DelayEchoApp
@@ -57,7 +57,7 @@ public class TestStreamingBackground {
       "-jobconf", "mapreduce.task.io.sort.mb=10" 
   };
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     UtilTest.recursiveDelete(TEST_DIR);
     assertTrue(TEST_DIR.mkdirs());
@@ -74,7 +74,7 @@ public class TestStreamingBackground {
     StreamJob job = new StreamJob(args, mayExit);
     returnStatus = job.go();
 
-    assertEquals("Streaming Job expected to succeed", 0, returnStatus);
+    assertEquals(0, returnStatus, "Streaming Job expected to succeed");
     job.running_.killJob();
     job.running_.waitForCompletion();
   }

+ 7 - 7
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java

@@ -31,7 +31,7 @@ import java.util.List;
 import java.util.Properties;
 import java.util.StringTokenizer;
 
-import org.junit.BeforeClass;
+import org.junit.jupiter.api.BeforeAll;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
@@ -42,11 +42,11 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestStreamingBadRecords extends ClusterMapReduceTestCase
 {
@@ -66,7 +66,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
     UtilTest.makeJavaCommand(BadApp.class, new String[]{"true"});
   private static final int INPUTSIZE=100;
 
-  @BeforeClass
+  @BeforeAll
   public static void setupClass() throws Exception {
     setupClassBase(TestStreamingBadRecords.class);
   }
@@ -78,7 +78,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
     utilTest.redirectIfAntJunit();
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     Properties props = new Properties();
     props.setProperty(JTConfig.JT_RETIREJOBS, "false");

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingCombiner.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 
 import org.apache.hadoop.mapred.Counters;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestStreamingCombiner extends TestStreaming {
 

+ 6 - 5
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingCounters.java

@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
 
 import java.io.IOException;
 
@@ -43,11 +44,11 @@ public class TestStreamingCounters extends TestStreaming {
   
   private void validateCounters() throws IOException {
     Counters counters = job.running_.getCounters();
-    assertNotNull("Counters", counters);
+    assertNotNull(counters, "Counters");
     Group group = counters.getGroup("UserCounters");
-    assertNotNull("Group", group);
+    assertNotNull(group, "Group");
     Counter counter = group.getCounterForName("InputLines");
-    assertNotNull("Counter", counter);
+    assertNotNull(counter, "Counter");
     assertEquals(3, counter.getCounter());
   }
 }

+ 7 - 6
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingExitStatus.java

@@ -18,9 +18,10 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import org.junit.Before;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeEach;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.*;
 import java.util.*;
@@ -62,7 +63,7 @@ public class TestStreamingExitStatus
     };
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     UtilTest.recursiveDelete(TEST_DIR);
     assertTrue(TEST_DIR.mkdirs());
@@ -80,9 +81,9 @@ public class TestStreamingExitStatus
     returnStatus = job.go();
     
     if (exitStatusIsFailure) {
-      assertEquals("Streaming Job failure code expected", /*job not successful:*/1, returnStatus);
+      assertEquals(/*job not successful:*/1, returnStatus, "Streaming Job failure code expected");
     } else {
-      assertEquals("Streaming Job expected to succeed", 0, returnStatus);
+      assertEquals(0, returnStatus, "Streaming Job expected to succeed");
     }
   }
 

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingFailure.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.File;
 import java.io.IOException;
@@ -50,6 +50,6 @@ public class TestStreamingFailure extends TestStreaming
   @Test
   public void testCommandLine() throws IOException {
     int returnStatus = runStreamJob();
-    assertEquals("Streaming Job Failure code expected", 5, returnStatus);
+    assertEquals(5, returnStatus, "Streaming Job Failure code expected");
   }
 }

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.*;
 import java.nio.charset.StandardCharsets;

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingOutputKeyValueTypes.java

@@ -26,8 +26,8 @@ import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -45,7 +45,7 @@ public class TestStreamingOutputKeyValueTypes extends TestStreaming {
     input = "one line dummy input\n";
   }
 
-  @Before
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     args.clear();

+ 1 - 1
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingOutputOnlyKeys.java

@@ -20,7 +20,7 @@ package org.apache.hadoop.streaming;
 
 import java.io.IOException;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestStreamingOutputOnlyKeys extends TestStreaming {
 

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.*;
 import java.nio.charset.StandardCharsets;

+ 7 - 6
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingStatus.java

@@ -25,10 +25,11 @@ import java.io.File;
 import org.apache.hadoop.mapred.MiniMRClientCluster;
 import org.apache.hadoop.mapred.MiniMRClientClusterFactory;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -101,7 +102,7 @@ public class TestStreamingStatus {
    *
    * @throws IOException
    */
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     conf = new JobConf();
     conf.setBoolean(JTConfig.JT_RETIREJOBS, false);
@@ -119,7 +120,7 @@ public class TestStreamingStatus {
   /**
    * Kill the cluster after the test is done.
    */
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     if (fs != null) {
       clean(fs);

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingStderr.java

@@ -24,8 +24,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * Test that streaming consumes stderr from the streaming process
@@ -82,7 +82,7 @@ public class TestStreamingStderr
 
     StreamJob job = new StreamJob(genArgs(input, output, preLines, duringLines, postLines), mayExit);
     returnStatus = job.go();
-    assertEquals("StreamJob success", 0, returnStatus);
+    assertEquals(0, returnStatus, "StreamJob success");
   }
 
   // This test will fail by blocking forever if the stderr isn't

+ 5 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java

@@ -25,8 +25,9 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -53,7 +54,8 @@ public class TestSymLink
   String cacheString = "This is just the cache string";
   StreamJob job;
 
-  @Test (timeout = 120000)
+  @Test
+  @Timeout(value = 120)
   public void testSymLink() throws Exception
   {
     boolean mayExit = false;

+ 6 - 6
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java

@@ -27,10 +27,10 @@ import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestTypedBytesStreaming {
 
@@ -65,8 +65,8 @@ public class TestTypedBytesStreaming {
     };
   }
 
-  @Before
-  @After
+  @BeforeEach
+  @AfterEach
   public void cleanupOutput() throws Exception {
     FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     INPUT_FILE.delete();

+ 5 - 5
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.streaming;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.DataOutputStream;
 import java.io.File;
@@ -29,7 +29,7 @@ import java.nio.charset.StandardCharsets;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestUnconsumedInput {
   protected final int EXPECTED_OUTPUT_SIZE = 10000;
@@ -91,11 +91,11 @@ public class TestUnconsumedInput {
       job = new StreamJob();
       job.setConf(conf);
       int exitCode = job.run(genArgs());
-      assertEquals("Job failed", 0, exitCode);
+      assertEquals(0, exitCode, "Job failed");
       outFile = new File(OUTPUT_DIR, outFileName).getAbsoluteFile();
       String output = StreamUtil.slurp(outFile);
-      assertEquals("Output was truncated", EXPECTED_OUTPUT_SIZE,
-          StringUtils.countMatches(output, "\t"));
+      assertEquals(EXPECTED_OUTPUT_SIZE, StringUtils.countMatches(output, "\t"),
+          "Output was truncated");
     } finally {
       INPUT_FILE.delete();
       FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());

+ 7 - 7
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/io/TestKeyOnlyTextOutputReader.java

@@ -18,18 +18,18 @@
 
 package org.apache.hadoop.streaming.io;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.IOException;
 
-import org.junit.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.streaming.PipeMapRed;
 import org.apache.hadoop.streaming.PipeMapper;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestKeyOnlyTextOutputReader {
   @Test
@@ -39,12 +39,12 @@ public class TestKeyOnlyTextOutputReader {
     KeyOnlyTextOutputReader outputReader = new KeyOnlyTextOutputReader();
     outputReader.initialize(pipeMapRed);
     outputReader.readKeyValue();
-    Assert.assertEquals(new Text("key,value"), outputReader.getCurrentKey());
+    assertEquals(new Text("key,value"), outputReader.getCurrentKey());
     outputReader.readKeyValue();
-    Assert.assertEquals(new Text("key2,value2"), outputReader.getCurrentKey());
+    assertEquals(new Text("key2,value2"), outputReader.getCurrentKey());
     outputReader.readKeyValue();
-    Assert.assertEquals(new Text("nocomma"), outputReader.getCurrentKey());
-    Assert.assertEquals(false, outputReader.readKeyValue());
+    assertEquals(new Text("nocomma"), outputReader.getCurrentKey());
+    assertEquals(false, outputReader.readKeyValue());
   }
   
   private class MyPipeMapRed extends PipeMapper {

+ 7 - 7
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.streaming.mapreduce;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -38,9 +38,9 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * This class tests StreamXmlRecordReader The test creates an XML file, uses
@@ -96,7 +96,7 @@ public class TestStreamXmlRecordReader {
     return contents;
   }
 
-  @Before
+  @BeforeEach
   public void createInput() throws IOException {
     FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
     String dummyXmlStartTag = "<PATTERN>\n";
@@ -137,7 +137,7 @@ public class TestStreamXmlRecordReader {
 
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     fs.delete(OUTPUT_DIR, true);
   }

+ 2 - 2
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java

@@ -26,8 +26,8 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestTypedBytesWritable {