HADOOP-19415. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-common Part11. (#7671)

* HADOOP-19415. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-common Part11.

Co-authored-by: Chris Nauroth <cnauroth@apache.org>
Co-authored-by: Hualong Zhang <hualong.z@hotmail.com>
Reviewed-by: Chris Nauroth <cnauroth@apache.org>
Reviewed-by: Hualong Zhang <hualong.z@hotmail.com>
Signed-off-by: Shilun Fan <slfan1989@apache.org>
slfan1989 1 month ago
parent
commit
1d4c4e4bc1
71 changed files with 957 additions and 847 deletions
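
Every file in this patch follows the same mechanical JUnit 4 → 5 recipe: Jupiter annotations replace the JUnit 4 ones (@Before/@After become @BeforeEach/@AfterEach, @Test(timeout = ...) splits into @Test plus @Timeout), static imports move from org.junit.Assert to org.junit.jupiter.api.Assertions, and assertion failure messages shift from the first argument to the last. A minimal sketch of the migrated shape — ExampleMigratedTest is an illustrative class, not part of this patch:

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import static org.junit.jupiter.api.Assertions.assertEquals;

// Illustrative only: shows the post-migration shape applied across this commit.
public class ExampleMigratedTest {

  private byte[] buf;

  @BeforeEach               // was: @Before (org.junit)
  public void setUp() {
    buf = new byte[4];
  }

  @AfterEach                // was: @After (org.junit)
  public void tearDown() {
    buf = null;
  }

  @Test
  @Timeout(value = 10)      // was: @Test(timeout = 10000); @Timeout defaults to seconds
  public void bufferHasExpectedSize() {
    // was: assertEquals("size mismatch", 4, buf.length) -- message now comes last
    assertEquals(4, buf.length, "size mismatch");
  }
}
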
  1. + 23 - 31
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/CompressDecompressTester.java
  2. + 7 - 7
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBZip2Codec.java
  3. + 8 - 8
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBlockDecompressorStream.java
  4. + 75 - 68
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
  5. + 11 - 12
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
  6. + 47 - 32
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java
  7. + 5 - 6
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
  8. + 1 - 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorDecompressor.java
  9. + 6 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorStream.java
  10. + 5 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestDecompressorStream.java
  11. + 8 - 8
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestGzipCodec.java
  12. + 6 - 6
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBZip2TextFileWriter.java
  13. + 18 - 16
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBzip2CompressorDecompressor.java
  14. + 22 - 21
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/lz4/TestLz4CompressorDecompressor.java
  15. + 24 - 27
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/snappy/TestSnappyCompressorDecompressor.java
  16. + 43 - 42
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java
  17. + 72 - 56
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java
  18. + 20 - 20
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java
  19. + 4 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRegistry.java
  20. + 2 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
  21. + 6 - 10
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestECSchema.java
  22. + 2 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestErasureCodingEncodeAndDecode.java
  23. + 2 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/codec/TestHHXORErasureCodec.java
  24. + 1 - 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java
  25. + 1 - 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java
  26. + 3 - 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java
  27. + 5 - 9
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java
  28. + 5 - 10
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java
  29. + 8 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestCoderUtil.java
  30. + 57 - 34
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDecodingValidator.java
  31. + 7 - 6
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java
  32. + 6 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java
  33. + 6 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java
  34. + 2 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java
  35. + 2 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java
  36. + 1 - 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java
  37. + 5 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java
  38. + 5 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java
  39. + 13 - 11
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java
  40. + 4 - 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawErasureCoderBenchmark.java
  41. + 2 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java
  42. + 1 - 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderBase.java
  43. + 5 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java
  44. + 5 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java
  45. + 6 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestCompression.java
  46. + 42 - 41
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java
  47. + 49 - 45
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java
  48. + 2 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java
  49. + 10 - 10
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
  50. + 2 - 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java
  51. + 2 - 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsByteArrays.java
  52. + 2 - 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java
  53. + 3 - 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsByteArrays.java
  54. + 3 - 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java
  55. + 3 - 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsStreams.java
  56. + 5 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java
  57. + 5 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
  58. + 17 - 17
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java
  59. + 8 - 8
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
  60. + 6 - 6
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
  61. + 17 - 18
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
  62. + 127 - 97
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
  63. + 8 - 5
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIoInit.java
  64. + 23 - 16
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java
  65. + 18 - 22
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestConnectionRetryPolicy.java
  66. + 7 - 10
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
  67. + 6 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java
  68. + 4 - 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
  69. + 15 - 15
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java
  70. + 3 - 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
  71. + 3 - 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java

+ 23 - 31
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/CompressDecompressTester.java

@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.io.compress;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -37,13 +38,11 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.log4j.Logger;
-import org.junit.Assert;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
-import static org.junit.Assert.*;
 
 public class CompressDecompressTester<T extends Compressor, E extends Decompressor> {
 
@@ -274,12 +273,10 @@ public class CompressDecompressTester<T extends Compressor, E extends Decompress
         int maxCompressedLength = 32 + rawData.length + rawData.length/6;
         byte[] compressedResult = new byte[maxCompressedLength];
         byte[] decompressedBytes = new byte[rawData.length];
-        assertTrue(
-            joiner.join(name, "compressor.needsInput before error !!!"),
-            compressor.needsInput());
-        assertEquals(
-              joiner.join(name, "compressor.getBytesWritten before error !!!"),
-            0, compressor.getBytesWritten());
+        assertTrue(compressor.needsInput(),
+            joiner.join(name, "compressor.needsInput before error !!!"));
+        assertEquals(0, compressor.getBytesWritten(),
+            joiner.join(name, "compressor.getBytesWritten before error !!!"));
         compressor.setInput(rawData, 0, rawData.length);
         compressor.finish();
         while (!compressor.finished()) {
@@ -288,23 +285,20 @@ public class CompressDecompressTester<T extends Compressor, E extends Decompress
         }
         compressor.reset();
 
-        assertTrue(
-            joiner.join(name, "decompressor.needsInput() before error !!!"),
-            decompressor.needsInput());
+        assertTrue(decompressor.needsInput(),
+            joiner.join(name, "decompressor.needsInput() before error !!!"));
         decompressor.setInput(compressedResult, 0, cSize);
-        assertFalse(
-            joiner.join(name, "decompressor.needsInput() after error !!!"),
-            decompressor.needsInput());
+        assertFalse(decompressor.needsInput(),
+            joiner.join(name, "decompressor.needsInput() after error !!!"));
         while (!decompressor.finished()) {
           decompressedSize = decompressor.decompress(decompressedBytes, 0,
               decompressedBytes.length);
         }
         decompressor.reset();
-        assertEquals(joiner.join(name, " byte size not equals error !!!"),
-            rawData.length, decompressedSize);
-        assertArrayEquals(
-            joiner.join(name, " byte arrays not equals error !!!"), rawData,
-            decompressedBytes);
+        assertEquals(rawData.length, decompressedSize,
+            joiner.join(name, " byte size not equals error !!!"));
+        assertArrayEquals(rawData, decompressedBytes,
+            joiner.join(name, " byte arrays not equals error !!!"));
       }
     }),
 
@@ -331,17 +325,16 @@ public class CompressDecompressTester<T extends Compressor, E extends Decompress
           // check compressed output
           buf = bytesOut.toByteArray();
           int emSize = emptySize.get(compressor.getClass());
-          Assert.assertEquals(
-              joiner.join(name, "empty stream compressed output size != "
-                  + emSize), emSize, buf.length);
+          assertEquals(emSize, buf.length,
+              joiner.join(name, "empty stream compressed output size != " + emSize));
           // use compressed output as input for decompression
           bytesIn = new ByteArrayInputStream(buf);
           // create decompression stream
           blockDecompressorStream = new BlockDecompressorStream(bytesIn,
               decompressor, 1024);
           // no byte is available because stream was closed
-          assertEquals(joiner.join(name, " return value is not -1"), -1,
-              blockDecompressorStream.read());
+          assertEquals(-1,
+              blockDecompressorStream.read(), joiner.join(name, " return value is not -1"));
         } catch (IOException e) {
           fail(joiner.join(name, e.getMessage()));
         } finally {
@@ -407,9 +400,8 @@ public class CompressDecompressTester<T extends Compressor, E extends Decompress
             decompressor.reset();
             off = off + step;
           }
-          assertArrayEquals(
-              joiner.join(name, "byte arrays not equals error !!!"),
-              originalRawData, decompressOut.toByteArray());
+          assertArrayEquals(originalRawData, decompressOut.toByteArray(),
+              joiner.join(name, "byte arrays not equals error !!!"));
         } catch (Exception ex) {
           throw new AssertionError(name + ex, ex);
         } finally {
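
Most of the churn above is the argument-order swap: org.junit.Assert methods take the failure message as the first parameter, while org.junit.jupiter.api.Assertions take it as the last (Jupiter also adds Supplier<String> overloads for lazily built messages). A hedged side-by-side sketch — ArgumentOrderSketch and its verify method are illustrative, not from the patch:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

// JUnit 4:  assertEquals(String message, long expected, long actual)
// JUnit 5:  assertEquals(long expected, long actual, String message)
class ArgumentOrderSketch {
  void verify(org.apache.hadoop.io.compress.Compressor compressor) {
    // Same checks as the hunk above, in the Jupiter argument order.
    assertTrue(compressor.needsInput(),
        "compressor.needsInput before error !!!");
    assertEquals(0, compressor.getBytesWritten(),
        "compressor.getBytesWritten before error !!!");
  }
}
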

+ 7 - 7
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBZip2Codec.java

@@ -22,9 +22,9 @@ import java.io.InputStream;
 import java.util.List;
 
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -41,8 +41,8 @@ import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE
 import static org.apache.hadoop.util.Preconditions.checkArgument;
 import static org.assertj.core.api.Assertions.assertThatNullPointerException;
 import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public final class TestBZip2Codec {
 
@@ -54,7 +54,7 @@ public final class TestBZip2Codec {
   private Decompressor decompressor;
   private Path tempFile;
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     conf = new Configuration();
 
@@ -71,7 +71,7 @@ public final class TestBZip2Codec {
     decompressor = CodecPool.getDecompressor(codec);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     CodecPool.returnDecompressor(decompressor);
     fs.delete(tempFile, /* recursive */ false);

+ 8 - 8
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBlockDecompressorStream.java

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.io.compress;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -29,7 +29,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestBlockDecompressorStream {
   
@@ -63,8 +63,8 @@ public class TestBlockDecompressorStream {
     
     // check compressed output 
     buf = bytesOut.toByteArray();
-    assertEquals("empty file compressed output size is not " + (bufLen + 4),
-        bufLen + 4, buf.length);
+    assertEquals(bufLen + 4, buf.length,
+        "empty file compressed output size is not " + (bufLen + 4));
     
     // use compressed output as input for decompression
     bytesIn = new ByteArrayInputStream(buf);
@@ -72,8 +72,8 @@ public class TestBlockDecompressorStream {
     // get decompression stream
     try (BlockDecompressorStream blockDecompressorStream =
       new BlockDecompressorStream(bytesIn, new FakeDecompressor(), 1024)) {
-      assertEquals("return value is not -1", 
-          -1 , blockDecompressorStream.read());
+      assertEquals(-1, blockDecompressorStream.read(),
+          "return value is not -1");
     } catch (IOException e) {
       fail("unexpected IOException : " + e);
     }

+ 75 - 68
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java

@@ -18,13 +18,13 @@
 package org.apache.hadoop.io.compress;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
@@ -77,8 +77,9 @@ import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -90,7 +91,7 @@ public class TestCodec {
   private int count = 10000;
   private int seed = new Random().nextInt();
 
-  @After
+  @AfterEach
   public void after() {
     ZlibFactory.loadNativeZLib();
   }
@@ -114,7 +115,8 @@ public class TestCodec {
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testBZip2Codec() throws IOException {
     Configuration conf = new Configuration();
     conf.set("io.compression.codec.bzip2.library", "java-builtin");
@@ -122,7 +124,8 @@ public class TestCodec {
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
   }
   
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testBZip2NativeCodec() throws IOException {
     Configuration conf = new Configuration();
     conf.set("io.compression.codec.bzip2.library", "system-native");
@@ -216,8 +219,8 @@ public class TestCodec {
       deflateFilter.finish();
     }
     if (leasedCompressorsBefore > -1) {
-      assertEquals("leased compressor not returned to the codec pool",
-          leasedCompressorsBefore, CodecPool.getLeasedCompressorsCount(codec));
+      assertEquals(leasedCompressorsBefore, CodecPool.getLeasedCompressorsCount(codec),
+          "leased compressor not returned to the codec pool");
     }
     LOG.info("Finished compressing data");
     
@@ -247,8 +250,8 @@ public class TestCodec {
         RandomDatum v2 = new RandomDatum();
         k2.readFields(inflateIn);
         v2.readFields(inflateIn);
-        assertTrue("original and compressed-then-decompressed-output not equal",
-                   k1.equals(k2) && v1.equals(v2));
+        assertTrue(k1.equals(k2) && v1.equals(v2),
+            "original and compressed-then-decompressed-output not equal");
       
         // original and compressed-then-decompressed-output have the same
         // hashCode
@@ -256,14 +259,14 @@ public class TestCodec {
         m.put(k1, k1.toString());
         m.put(v1, v1.toString());
         String result = m.get(k2);
-        assertEquals("k1 and k2 hashcode not equal", result, k1.toString());
+        assertEquals(result, k1.toString(), "k1 and k2 hashcode not equal");
         result = m.get(v2);
-        assertEquals("v1 and v2 hashcode not equal", result, v1.toString());
+        assertEquals(result, v1.toString(), "v1 and v2 hashcode not equal");
       }
     }
-    assertEquals("leased decompressor not returned to the codec pool",
-        leasedDecompressorsBefore,
-        CodecPool.getLeasedDecompressorsCount(codec));
+    assertEquals(leasedDecompressorsBefore,
+        CodecPool.getLeasedDecompressorsCount(codec),
+        "leased decompressor not returned to the codec pool");
 
     // De-compress data byte-at-a-time
     originalData.reset(data.getData(), 0, data.getLength());
@@ -278,8 +281,8 @@ public class TestCodec {
       int expected;
       do {
         expected = originalIn.read();
-        assertEquals("Inflated stream read by byte does not match",
-            expected, inflateFilter.read());
+        assertEquals(expected, inflateFilter.read(),
+            "Inflated stream read by byte does not match");
       } while (expected != -1);
     }
 
@@ -334,7 +337,7 @@ public class TestCodec {
           break;
         }
         final int seq2 = readLeadingInt(line);
-        assertEquals("Mismatched lines", seq1 + 1, seq2);
+        assertEquals(seq1 + 1, seq2, "Mismatched lines");
       }
     } finally {
       CodecPool.returnDecompressor(dcmp);
@@ -396,7 +399,7 @@ public class TestCodec {
     Compressor c2 = CodecPool.getCompressor(dfc);
     CodecPool.returnCompressor(c1);
     CodecPool.returnCompressor(c2);
-    assertTrue("Got mismatched ZlibCompressor", c2 != CodecPool.getCompressor(gzc));
+    assertTrue(c2 != CodecPool.getCompressor(gzc), "Got mismatched ZlibCompressor");
   }
 
   private static void gzipReinitTest(Configuration conf, CompressionCodec codec)
@@ -411,7 +414,7 @@ public class TestCodec {
     ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
     Compressor c2 = CodecPool.getCompressor(codec, conf);
     // ensure same compressor placed earlier
-    assertTrue("Got mismatched ZlibCompressor", c1 == c2);
+    assertTrue(c1 == c2, "Got mismatched ZlibCompressor");
     ByteArrayOutputStream bos = new ByteArrayOutputStream();
     CompressionOutputStream cos = null;
     // write trivially compressable data
@@ -428,8 +431,8 @@ public class TestCodec {
     }
     byte[] outbytes = bos.toByteArray();
     // verify data were not compressed
-    assertTrue("Compressed bytes contrary to configuration",
-               outbytes.length >= b.length);
+    assertTrue(outbytes.length >= b.length,
+        "Compressed bytes contrary to configuration");
   }
 
   private static void codecTestWithNOCompression (Configuration conf,
@@ -463,8 +466,8 @@ public class TestCodec {
     }
     byte[] outbytes = bos.toByteArray();
     // verify data were not compressed
-    assertTrue("Compressed bytes contrary to configuration(NO_COMPRESSION)",
-               outbytes.length >= b.length);
+    assertTrue(outbytes.length >= b.length,
+        "Compressed bytes contrary to configuration(NO_COMPRESSION)");
   }
 
   @Test
@@ -509,7 +512,8 @@ public class TestCodec {
     sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.DefaultCodec", 1000000);
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundException,
       InstantiationException, IllegalAccessException {
     Configuration conf = new Configuration();
@@ -519,7 +523,8 @@ public class TestCodec {
     sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.BZip2Codec", 1000000);
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testSequenceFileZStandardCodec() throws Exception {
     assumeTrue(ZStandardCodec.isNativeCodeLoaded());
     Configuration conf = new Configuration();
@@ -531,7 +536,8 @@ public class TestCodec {
         "org.apache.hadoop.io.compress.ZStandardCodec", 1000000);
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testSequenceFileBZip2NativeCodec() throws IOException, 
                         ClassNotFoundException, InstantiationException, 
                         IllegalAccessException {
@@ -954,9 +960,9 @@ public class TestCodec {
     ZlibFactory.setNativeZlibLoaded(false);
     // Ensure that the CodecPool has a BuiltInZlibInflater in it.
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
-    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
-    assertTrue("ZlibFactory returned unexpected inflator",
-        zlibDecompressor instanceof BuiltInZlibInflater);
+    assertNotNull(zlibDecompressor, "zlibDecompressor is null!");
+    assertTrue(zlibDecompressor instanceof BuiltInZlibInflater,
+        "ZlibFactory returned unexpected inflator");
     CodecPool.returnDecompressor(zlibDecompressor);
 
     // Now create a GZip text file.
@@ -977,7 +983,7 @@ public class TestCodec {
     is = codec.createInputStream(is, decompressor);
     BufferedReader br = new BufferedReader(new InputStreamReader(is));
     String line = br.readLine();
-    assertEquals("Didn't get the same message back!", msg, line);
+    assertEquals(msg, line, "Didn't get the same message back!");
     br.close();
   }
 
@@ -986,7 +992,7 @@ public class TestCodec {
         new GZIPInputStream(new FileInputStream(filename))));
     try {
       String line = r.readLine();
-      assertEquals("Got invalid line back from " + filename, msg, line);
+      assertEquals(msg, line, "Got invalid line back from " + filename);
     } finally {
       r.close();
       new File(filename).delete();
@@ -1000,14 +1006,14 @@ public class TestCodec {
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
     ZlibFactory.setNativeZlibLoaded(false);
-    assertFalse("ZlibFactory is using native libs against request",
-        ZlibFactory.isNativeZlibLoaded(conf));
+    assertFalse(ZlibFactory.isNativeZlibLoaded(conf),
+        "ZlibFactory is using native libs against request");
 
     // Ensure that the CodecPool has a BuiltInZlibInflater in it.
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
-    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
-    assertTrue("ZlibFactory returned unexpected inflator",
-        zlibDecompressor instanceof BuiltInZlibInflater);
+    assertNotNull(zlibDecompressor, "zlibDecompressor is null!");
+    assertTrue(zlibDecompressor instanceof BuiltInZlibInflater,
+        "ZlibFactory returned unexpected inflator");
     CodecPool.returnDecompressor(zlibDecompressor);
 
     // Now create a GZip text file.
@@ -1034,9 +1040,9 @@ public class TestCodec {
     BufferedReader br = new BufferedReader(new InputStreamReader(is));
     for (int j = 0; j < NBUF; j++) {
       int n = br.read(buf);
-      assertEquals("got wrong read length!", n, buf.length);
+      assertEquals(n, buf.length, "got wrong read length!");
       for (int i = 0; i < buf.length; i++)
-        assertEquals("got wrong byte!", buf[i], '\0');
+        assertEquals(buf[i], '\0', "got wrong byte!");
     }
     br.close();
   }
@@ -1050,24 +1056,24 @@ public class TestCodec {
     if (useNative) {
       assumeTrue(ZlibFactory.isNativeZlibLoaded(hadoopConf));
     } else {
-      assertFalse("ZlibFactory is using native libs against request",
-          ZlibFactory.isNativeZlibLoaded(hadoopConf));
+      assertFalse(ZlibFactory.isNativeZlibLoaded(hadoopConf),
+          "ZlibFactory is using native libs against request");
     }
 
     // Ensure that the CodecPool has a BuiltInZlibDeflater in it.
     Compressor zlibCompressor = ZlibFactory.getZlibCompressor(hadoopConf);
-    assertNotNull("zlibCompressor is null!", zlibCompressor);
-    assertTrue("ZlibFactory returned unexpected deflator",
-          useNative ? zlibCompressor instanceof ZlibCompressor
-                    : zlibCompressor instanceof BuiltInZlibDeflater);
+    assertNotNull(zlibCompressor, "zlibCompressor is null!");
+    assertTrue(useNative ? zlibCompressor instanceof ZlibCompressor
+        : zlibCompressor instanceof BuiltInZlibDeflater,
+        "ZlibFactory returned unexpected deflator");
 
     CodecPool.returnCompressor(zlibCompressor);
 
     // Create a GZIP text file via the Compressor interface.
     CompressionCodecFactory ccf = new CompressionCodecFactory(hadoopConf);
     CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
-    assertTrue("Codec for .gz file is not GzipCodec", 
-               codec instanceof GzipCodec);
+    assertTrue(codec instanceof GzipCodec,
+        "Codec for .gz file is not GzipCodec");
 
     final String fileName = new Path(GenericTestUtils.getTempPath(
         "testGzipCodecWrite.txt.gz")).toString();
@@ -1127,20 +1133,20 @@ public class TestCodec {
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
     ZlibFactory.setNativeZlibLoaded(false);
-    assertFalse("ZlibFactory is using native libs against request",
-            ZlibFactory.isNativeZlibLoaded(conf));
+    assertFalse(ZlibFactory.isNativeZlibLoaded(conf),
+        "ZlibFactory is using native libs against request");
 
     // This should give us a BuiltInZlibDeflater.
     Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
-    assertNotNull("zlibCompressor is null!", zlibCompressor);
-    assertTrue("ZlibFactory returned unexpected deflator",
-            zlibCompressor instanceof BuiltInZlibDeflater);
+    assertNotNull(zlibCompressor, "zlibCompressor is null!");
+    assertTrue(zlibCompressor instanceof BuiltInZlibDeflater,
+        "ZlibFactory returned unexpected deflator");
     // its createOutputStream() just wraps the existing stream in a
     // java.util.zip.GZIPOutputStream.
     CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
     CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
-    assertTrue("Codec for .gz file is not GzipCodec",
-            codec instanceof GzipCodec);
+    assertTrue(codec instanceof GzipCodec,
+        "Codec for .gz file is not GzipCodec");
 
     // make sure we don't get a null compressor
     Compressor codecCompressor = codec.createCompressor();
@@ -1177,20 +1183,20 @@ public class TestCodec {
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
     ZlibFactory.setNativeZlibLoaded(false);
-    assertFalse("ZlibFactory is using native libs against request",
-                ZlibFactory.isNativeZlibLoaded(conf));
+    assertFalse(ZlibFactory.isNativeZlibLoaded(conf),
+        "ZlibFactory is using native libs against request");
 
     // This should give us a BuiltInZlibInflater.
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
-    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
-    assertTrue("ZlibFactory returned unexpected inflator",
-	       zlibDecompressor instanceof BuiltInZlibInflater);
+    assertNotNull(zlibDecompressor, "zlibDecompressor is null!");
+    assertTrue(zlibDecompressor instanceof BuiltInZlibInflater,
+        "ZlibFactory returned unexpected inflator");
     // its createOutputStream() just wraps the existing stream in a
     // java.util.zip.GZIPOutputStream.
     CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
     CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
-    assertTrue("Codec for .gz file is not GzipCodec", 
-	       codec instanceof GzipCodec);
+    assertTrue(codec instanceof GzipCodec,
+        "Codec for .gz file is not GzipCodec");
 
     // make sure we don't get a null decompressor
     Decompressor codecDecompressor = codec.createDecompressor();
@@ -1219,7 +1225,8 @@ public class TestCodec {
     }
   }
 
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testGzipCompressorWithEmptyInput() throws IOException {
     // don't use native libs
     ZlibFactory.setNativeZlibLoaded(false);
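
One subtlety in the timeout migrations above: JUnit 4's @Test(timeout = 20000) is measured in milliseconds, whereas Jupiter's @Timeout defaults to TimeUnit.SECONDS, so the value drops from 20000 to 20. A sketch of the two equivalent Jupiter spellings (TimeoutSketch is illustrative; test bodies are placeholders):

import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutSketch {

  @Test
  @Timeout(value = 20)   // unit defaults to seconds: same bound as timeout = 20000 ms
  void boundedTest() { /* ... */ }

  @Test
  @Timeout(value = 20000, unit = TimeUnit.MILLISECONDS)   // explicit unit, same bound
  void boundedTestExplicitUnit() { /* ... */ }
}
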

+ 11 - 12
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java

@@ -27,10 +27,10 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
 
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class TestCodecFactory {
 
@@ -139,13 +139,12 @@ public class TestCodecFactory {
   private static void checkCodec(String msg, 
                                  Class expected, CompressionCodec actual) {
     if (expected == null) {
-      assertNull(msg, actual);
+      assertNull(actual, msg);
     } else if (actual == null) {
       fail(msg + " result was null");
     } else {
-      assertEquals(msg + " unexpected codec found",
-              expected.getName(),
-              actual.getClass().getName());
+      assertEquals(expected.getName(),
+          actual.getClass().getName(), msg + " unexpected codec found");
     }
   }
 
@@ -154,9 +153,9 @@ public class TestCodecFactory {
     CompressionCodecFactory factory =
             new CompressionCodecFactory(new Configuration());
     CompressionCodec codec = factory.getCodec(new Path("/tmp/foo.bar"));
-    assertEquals("default factory foo codec", null, codec);
+    assertEquals(null, codec, "default factory foo codec");
     codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName());
-    assertEquals("default factory foo codec", null, codec);
+    assertEquals(null, codec, "default factory foo codec");
     
     codec = factory.getCodec(new Path("/tmp/foo.gz"));
     checkCodec("default factory for .gz", GzipCodec.class, codec);
@@ -204,9 +203,9 @@ public class TestCodecFactory {
     factory = setClasses(new Class[0]);
     // gz, bz2, snappy, lz4 are picked up by service loader, but bar isn't
     codec = factory.getCodec(new Path("/tmp/foo.bar"));
-    assertEquals("empty factory bar codec", null, codec);
+    assertEquals(null, codec, "empty factory bar codec");
     codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName());
-    assertEquals("empty factory bar codec", null, codec);
+    assertEquals(null, codec, "empty factory bar codec");
     
     codec = factory.getCodec(new Path("/tmp/foo.gz"));
     checkCodec("empty factory gz codec", GzipCodec.class, codec);

+ 47 - 32
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.io.compress;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -38,8 +38,9 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 import java.util.HashSet;
 import java.util.Set;
@@ -49,34 +50,40 @@ public class TestCodecPool {
       "Incorrect number of leased (de)compressors";
   DefaultCodec codec;
 
-  @Before
+  @BeforeEach
   public void setup() {
     this.codec = new DefaultCodec();
     this.codec.setConf(new Configuration());
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testCompressorPoolCounts() {
     // Get two compressors and return them
     Compressor comp1 = CodecPool.getCompressor(codec);
     Compressor comp2 = CodecPool.getCompressor(codec);
-    assertEquals(LEASE_COUNT_ERR, 2,
-        CodecPool.getLeasedCompressorsCount(codec));
+    assertEquals(2,
+        CodecPool.getLeasedCompressorsCount(codec),
+        LEASE_COUNT_ERR);
 
     CodecPool.returnCompressor(comp2);
-    assertEquals(LEASE_COUNT_ERR, 1,
-        CodecPool.getLeasedCompressorsCount(codec));
+    assertEquals(1,
+        CodecPool.getLeasedCompressorsCount(codec),
+        LEASE_COUNT_ERR);
 
     CodecPool.returnCompressor(comp1);
-    assertEquals(LEASE_COUNT_ERR, 0,
-        CodecPool.getLeasedCompressorsCount(codec));
+    assertEquals(0,
+        CodecPool.getLeasedCompressorsCount(codec),
+        LEASE_COUNT_ERR);
 
     CodecPool.returnCompressor(comp1);
-    assertEquals(LEASE_COUNT_ERR, 0,
-        CodecPool.getLeasedCompressorsCount(codec));
+    assertEquals(0,
+        CodecPool.getLeasedCompressorsCount(codec),
+        LEASE_COUNT_ERR);
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testCompressorNotReturnSameInstance() {
     Compressor comp = CodecPool.getCompressor(codec);
     CodecPool.returnCompressor(comp);
@@ -91,7 +98,8 @@ public class TestCodecPool {
     }
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testCompressorConf() throws Exception {
     DefaultCodec codec1 = new DefaultCodec();
     Configuration conf = new Configuration();
@@ -121,28 +129,30 @@ public class TestCodecPool {
     CodecPool.returnCompressor(comp2);
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testDecompressorPoolCounts() {
     // Get two decompressors and return them
     Decompressor decomp1 = CodecPool.getDecompressor(codec);
     Decompressor decomp2 = CodecPool.getDecompressor(codec);
-    assertEquals(LEASE_COUNT_ERR, 2,
-        CodecPool.getLeasedDecompressorsCount(codec));
+    assertEquals(2,
+        CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR);
 
     CodecPool.returnDecompressor(decomp2);
-    assertEquals(LEASE_COUNT_ERR, 1,
-        CodecPool.getLeasedDecompressorsCount(codec));
+    assertEquals(1,
+        CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR);
 
     CodecPool.returnDecompressor(decomp1);
-    assertEquals(LEASE_COUNT_ERR, 0,
-        CodecPool.getLeasedDecompressorsCount(codec));
+    assertEquals(0,
+        CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR);
 
     CodecPool.returnDecompressor(decomp1);
-    assertEquals(LEASE_COUNT_ERR, 0,
-        CodecPool.getLeasedCompressorsCount(codec));
+    assertEquals(0,
+        CodecPool.getLeasedCompressorsCount(codec), LEASE_COUNT_ERR);
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testMultiThreadedCompressorPool() throws InterruptedException {
     final int iterations = 4;
     ExecutorService threadpool = Executors.newFixedThreadPool(3);
@@ -176,10 +186,12 @@ public class TestCodecPool {
     threadpool.shutdown();
     threadpool.awaitTermination(1000, TimeUnit.SECONDS);
 
-    assertEquals(LEASE_COUNT_ERR, 0, CodecPool.getLeasedCompressorsCount(codec));
+    assertEquals(0, CodecPool.getLeasedCompressorsCount(codec),
+        LEASE_COUNT_ERR);
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testMultiThreadedDecompressorPool() throws InterruptedException {
     final int iterations = 4;
     ExecutorService threadpool = Executors.newFixedThreadPool(3);
@@ -213,11 +225,12 @@ public class TestCodecPool {
     threadpool.shutdown();
     threadpool.awaitTermination(1000, TimeUnit.SECONDS);
 
-    assertEquals(LEASE_COUNT_ERR, 0,
-        CodecPool.getLeasedDecompressorsCount(codec));
+    assertEquals(0,
+        CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR);
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testDecompressorNotReturnSameInstance() {
     Decompressor decomp = CodecPool.getDecompressor(codec);
     CodecPool.returnDecompressor(decomp);
@@ -232,7 +245,8 @@ public class TestCodecPool {
     }
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testDoNotPoolCompressorNotUseableAfterReturn() throws Exception {
 
     final GzipCodec gzipCodec = new GzipCodec();
@@ -252,7 +266,8 @@ public class TestCodecPool {
         () -> outputStream.write(1));
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testDoNotPoolDecompressorNotUseableAfterReturn() throws Exception {
 
     final GzipCodec gzipCodec = new GzipCodec();

+ 5 - 6
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java

@@ -33,12 +33,12 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 public class TestCompressionStreamReuse {
   private static final Logger LOG = LoggerFactory
@@ -165,9 +165,8 @@ public class TestCompressionStreamReuse {
       RandomDatum v2 = new RandomDatum();
       k2.readFields(inflateIn);
       v2.readFields(inflateIn);
-      assertTrue(
-          "original and compressed-then-decompressed-output not equal",
-          k1.equals(k2) && v1.equals(v2));
+      assertTrue(k1.equals(k2) && v1.equals(v2),
+          "original and compressed-then-decompressed-output not equal");
     }
     LOG.info("SUCCESS! Completed checking " + count + " records");
   }
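
The assumeTrue import above keeps its name but moves from org.junit.Assume to org.junit.jupiter.api.Assumptions; under Jupiter a failed assumption throws org.opentest4j.TestAbortedException and the test is reported as aborted (skipped) rather than failed. A sketch of the migrated guard — AssumptionSketch and isNativeLoaded are illustrative, not from the patch:

import static org.junit.jupiter.api.Assumptions.assumeTrue;

import org.junit.jupiter.api.Test;

class AssumptionSketch {

  @Test
  void nativeOnlyTest() {
    // Aborts the test instead of failing it when native support is absent.
    assumeTrue(isNativeLoaded(), "native library not loaded");
    // ... test body runs only when the assumption holds ...
  }

  private boolean isNativeLoaded() {
    return false;   // placeholder; real tests query e.g. ZlibFactory.isNativeZlibLoaded(conf)
  }
}
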

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorDecompressor.java

@@ -26,7 +26,7 @@ import org.apache.hadoop.io.compress.snappy.SnappyDecompressor;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
 
 /** 

+ 6 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorStream.java

@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.io.compress;
 
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-public class TestCompressorStream extends CompressorStream{
+public class TestCompressorStream extends CompressorStream {
   
   private static FileOutputStream fop = null;
   private static File file = null;
@@ -67,8 +68,8 @@ public class TestCompressorStream extends CompressorStream{
     catch(IOException e) {
       System.out.println("Expected IOException");
     }
-    Assert.assertTrue("closed shoud be true", 
-        ((CompressorStream)testCompressorStream).closed);
+    assertTrue(
+        ((CompressorStream)testCompressorStream).closed, "closed shoud be true");
     //cleanup after test case
     file.delete();
   }

+ 5 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestDecompressorStream.java

@@ -18,15 +18,15 @@
 package org.apache.hadoop.io.compress;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.ByteArrayInputStream;
 import java.io.EOFException;
 import java.io.IOException;
 
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class TestDecompressorStream {
   private static final String TEST_STRING =
@@ -36,7 +36,7 @@ public class TestDecompressorStream {
   private Decompressor decompressor;
   private DecompressorStream decompressorStream;
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     bytesIn = new ByteArrayInputStream(TEST_STRING.getBytes());
     decompressor = new FakeDecompressor();

+ 8 - 8
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestGzipCodec.java

@@ -32,10 +32,10 @@ import org.apache.hadoop.io.DataOutputBuffer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * Verify resettable compressor.
@@ -49,7 +49,7 @@ public class TestGzipCodec {
   private static final String DATA2 = "It's baconnnn!!\n";
   private GzipCodec codec = new GzipCodec();
 
-  @Before
+  @BeforeEach
   public void setUp() {
     codec.setConf(new Configuration(false));
   }
@@ -68,7 +68,7 @@ public class TestGzipCodec {
     byte[] buf = new byte[1024];
     int len = cmpIn.read(buf);
     String result = new String(buf, 0, len, StandardCharsets.UTF_8);
-    assertEquals("Input must match output", DATA1, result);
+    assertEquals(DATA1, result, "Input must match output");
   }
 
   // Test multi-member gzip file created via finish(), resetState().
@@ -97,7 +97,7 @@ public class TestGzipCodec {
       }
       result.append(new String(buf, 0, len, StandardCharsets.UTF_8));
     }
-    assertEquals("Output must match input", DATA1 + DATA2, result.toString());
+    assertEquals(DATA1 + DATA2, result.toString(), "Output must match input");
   }
 
   // ensure all necessary methods are overwritten
@@ -164,6 +164,6 @@ public class TestGzipCodec {
       }
       result.append(new String(buf, 0, len, StandardCharsets.UTF_8));
     }
-    assertEquals("Output must match input", DATA1, result.toString());
+    assertEquals(DATA1, result.toString(), "Output must match input");
   }
 }

+ 6 - 6
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBZip2TextFileWriter.java

@@ -22,12 +22,12 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.List;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public final class TestBZip2TextFileWriter {
 
@@ -36,13 +36,13 @@ public final class TestBZip2TextFileWriter {
   private ByteArrayOutputStream rawOut;
   private BZip2TextFileWriter writer;
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     rawOut = new ByteArrayOutputStream();
     writer = new BZip2TextFileWriter(rawOut);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     rawOut = null;
     writer.close();

+ 18 - 16
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBzip2CompressorDecompressor.java

@@ -21,20 +21,23 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.bzip2.Bzip2Compressor;
 import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor;
 import org.apache.hadoop.test.MultithreadedTestUtil;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.*;
 import java.util.Random;
 
-import static org.junit.Assert.*;
-import static org.junit.Assume.*;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 public class TestBzip2CompressorDecompressor {
   
   private static final Random rnd = new Random(12345l);
 
-  @Before
+  @BeforeEach
   public void before() {
     assumeTrue(Bzip2Factory.isNativeBzip2Loaded(new Configuration()));
   }
@@ -49,25 +52,24 @@ public class TestBzip2CompressorDecompressor {
     try {
       Bzip2Compressor compressor = new Bzip2Compressor();
       Bzip2Decompressor decompressor = new Bzip2Decompressor();
-      assertFalse("testBzip2CompressDecompress finished error",
-          compressor.finished());
+      assertFalse(compressor.finished(),
+          "testBzip2CompressDecompress finished error");
       compressor.setInput(rawData, 0, rawData.length);
-      assertTrue("testBzip2CompressDecompress getBytesRead before error",
-          compressor.getBytesRead() == 0);
+      assertTrue(compressor.getBytesRead() == 0,
+          "testBzip2CompressDecompress getBytesRead before error");
       compressor.finish();
 
       byte[] compressedResult = new byte[rawDataSize];
       int cSize = compressor.compress(compressedResult, 0, rawDataSize);
-      assertTrue("testBzip2CompressDecompress getBytesRead after error",
-          compressor.getBytesRead() == rawDataSize);
-      assertTrue(
-          "testBzip2CompressDecompress compressed size no less than original size",
-          cSize < rawDataSize);
+      assertTrue(compressor.getBytesRead() == rawDataSize,
+          "testBzip2CompressDecompress getBytesRead after error");
+      assertTrue(cSize < rawDataSize,
+          "testBzip2CompressDecompress compressed size no less than original size");
       decompressor.setInput(compressedResult, 0, cSize);
       byte[] decompressedBytes = new byte[rawDataSize];
       decompressor.decompress(decompressedBytes, 0, decompressedBytes.length);
-      assertArrayEquals("testBzip2CompressDecompress arrays not equals ",
-          rawData, decompressedBytes);
+      assertArrayEquals(rawData, decompressedBytes,
+          "testBzip2CompressDecompress arrays not equals ");
       compressor.reset();
       decompressor.reset();
     } catch (IOException ex) {

+ 22 - 21
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/lz4/TestLz4CompressorDecompressor.java

@@ -17,7 +17,11 @@
  */
 package org.apache.hadoop.io.compress.lz4;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.ByteArrayInputStream;
@@ -41,8 +45,7 @@ import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.lz4.Lz4Compressor;
 import org.apache.hadoop.io.compress.lz4.Lz4Decompressor;
 import org.apache.hadoop.test.MultithreadedTestUtil;
-import org.junit.Test;
-import static org.junit.Assume.*;
+import org.junit.jupiter.api.Test;
 
 public class TestLz4CompressorDecompressor {
   
@@ -175,13 +178,12 @@ public class TestLz4CompressorDecompressor {
     try {
       Lz4Compressor compressor = new Lz4Compressor();
       byte[] bytes = generate(BYTES_SIZE);
-      assertTrue("needsInput error !!!", compressor.needsInput());
+      assertTrue(compressor.needsInput(), "needsInput error !!!");
       compressor.setInput(bytes, 0, bytes.length);
       byte[] emptyBytes = new byte[BYTES_SIZE];
       int csize = compressor.compress(emptyBytes, 0, bytes.length);
-      assertTrue(
-          "testSetInputWithBytesSizeMoreThenDefaultLz4CompressorByfferSize error !!!",
-          csize != 0);
+      assertTrue(csize != 0,
+          "testSetInputWithBytesSizeMoreThenDefaultLz4CompressorByfferSize error !!!");
     } catch (Exception ex) {
       fail("testSetInputWithBytesSizeMoreThenDefaultLz4CompressorByfferSize ex error !!!");
     }
@@ -195,28 +197,27 @@ public class TestLz4CompressorDecompressor {
     Lz4Compressor compressor = new Lz4Compressor();
     try {
       compressor.setInput(bytes, 0, bytes.length);
-      assertTrue("Lz4CompressDecompress getBytesRead error !!!",
-          compressor.getBytesRead() > 0);
-      assertTrue(
-          "Lz4CompressDecompress getBytesWritten before compress error !!!",
-          compressor.getBytesWritten() == 0);
+      assertTrue(compressor.getBytesRead() > 0,
+          "Lz4CompressDecompress getBytesRead error !!!");
+      assertTrue(compressor.getBytesWritten() == 0,
+          "Lz4CompressDecompress getBytesWritten before compress error !!!");
 
       byte[] compressed = new byte[BYTE_SIZE];
       int cSize = compressor.compress(compressed, 0, compressed.length);
-      assertTrue(
-          "Lz4CompressDecompress getBytesWritten after compress error !!!",
-          compressor.getBytesWritten() > 0);
+      assertTrue(compressor.getBytesWritten() > 0,
+          "Lz4CompressDecompress getBytesWritten after compress error !!!");
       Lz4Decompressor decompressor = new Lz4Decompressor();
       // set as input for decompressor only compressed data indicated with cSize
       decompressor.setInput(compressed, 0, cSize);
       byte[] decompressed = new byte[BYTE_SIZE];
       decompressor.decompress(decompressed, 0, decompressed.length);
 
-      assertTrue("testLz4CompressDecompress finished error !!!", decompressor.finished());      
+      assertTrue(decompressor.finished(), "testLz4CompressDecompress finished error !!!");
       assertArrayEquals(bytes, decompressed);
       compressor.reset();
       decompressor.reset();
-      assertTrue("decompressor getRemaining error !!!",decompressor.getRemaining() == 0);
+      assertTrue(decompressor.getRemaining() == 0,
+          "decompressor getRemaining error !!!");
     } catch (Exception e) {
       fail("testLz4CompressDecompress ex error!!!");
     }
@@ -238,14 +239,14 @@ public class TestLz4CompressorDecompressor {
       blockCompressorStream.close();
       // check compressed output
       buf = bytesOut.toByteArray();
-      assertEquals("empty stream compressed output size != 4", 4, buf.length);
+      assertEquals(4, buf.length, "empty stream compressed output size != 4");
       // use compressed output as input for decompression
       bytesIn = new ByteArrayInputStream(buf);
       // create decompression stream
       blockDecompressorStream = new BlockDecompressorStream(bytesIn,
           new Lz4Decompressor(), 1024);
       // no byte is available because stream was closed
-      assertEquals("return value is not -1", -1, blockDecompressorStream.read());
+      assertEquals(-1, blockDecompressorStream.read(), "return value is not -1");
     } catch (Exception e) {
       fail("testCompressorDecompressorEmptyStreamLogic ex error !!!"
           + e.getMessage());
@@ -291,8 +292,8 @@ public class TestLz4CompressorDecompressor {
       byte[] result = new byte[BYTE_SIZE];
       inflateIn.read(result);
 
-      assertArrayEquals("original array not equals compress/decompressed array", result,
-          bytes);
+      assertArrayEquals(bytes, result,
+          "original array not equals compress/decompressed array");
     } catch (IOException e) {
       fail("testLz4CompressorDecopressorLogicWithCompressionStreams ex error !!!");
     } finally {

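The Lz4 hunks above capture the core mechanical change of this migration: JUnit 4's Assert methods take the optional failure message as the first argument, while JUnit 5's Assertions take it as the last. A minimal standalone sketch of the pattern, with an illustrative class name and stand-in value rather than anything from the patch:

import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Test;

public class MessageOrderExample {
  @Test
  public void testMessageMovesLast() {
    int cSize = 42; // stand-in for a real compressed size
    // JUnit 4 form: assertTrue("non-empty output expected", cSize != 0);
    // JUnit 5 moves the message to the final parameter:
    assertTrue(cSize != 0, "non-empty output expected");
  }
}
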
+ 24 - 27
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/snappy/TestSnappyCompressorDecompressor.java

@@ -18,9 +18,10 @@
 package org.apache.hadoop.io.compress.snappy;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
@@ -42,9 +43,8 @@ import org.apache.hadoop.io.compress.CompressionInputStream;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.snappy.SnappyDecompressor.SnappyDirectDecompressor;
 import org.apache.hadoop.test.MultithreadedTestUtil;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -53,7 +53,7 @@ public class TestSnappyCompressorDecompressor {
   public static final Logger LOG =
       LoggerFactory.getLogger(TestSnappyCompressorDecompressor.class);
 
-  @Before
+  @BeforeEach
   public void before() {
   }
 
@@ -175,11 +175,10 @@ public class TestSnappyCompressorDecompressor {
     byte[] bytes = BytesGenerator.get(BYTE_SIZE);
     SnappyCompressor compressor = new SnappyCompressor();
     compressor.setInput(bytes, 0, bytes.length);
-    assertTrue("SnappyCompressDecompress getBytesRead error !!!",
-        compressor.getBytesRead() > 0);
-    assertEquals(
-        "SnappyCompressDecompress getBytesWritten before compress error !!!",
-        0, compressor.getBytesWritten());
+    assertTrue(compressor.getBytesRead() > 0,
+        "SnappyCompressDecompress getBytesRead error !!!");
+    assertEquals(0, compressor.getBytesWritten(),
+        "SnappyCompressDecompress getBytesWritten before compress error !!!");
 
     // snappy compression may increase data size.
     // This calculation comes from "Snappy::MaxCompressedLength(size_t)"
@@ -188,9 +187,8 @@ public class TestSnappyCompressorDecompressor {
     int cSize = compressor.compress(compressed, 0, compressed.length);
     LOG.info("input size: {}", BYTE_SIZE);
     LOG.info("compressed size: {}", cSize);
-    assertTrue(
-        "SnappyCompressDecompress getBytesWritten after compress error !!!",
-        compressor.getBytesWritten() > 0);
+    assertTrue(compressor.getBytesWritten() > 0,
+        "SnappyCompressDecompress getBytesWritten after compress error !!!");
 
     SnappyDecompressor decompressor = new SnappyDecompressor();
     // set as input for decompressor only compressed data indicated with cSize
@@ -198,13 +196,13 @@ public class TestSnappyCompressorDecompressor {
     byte[] decompressed = new byte[BYTE_SIZE];
     decompressor.decompress(decompressed, 0, decompressed.length);
 
-    assertTrue("testSnappyCompressDecompress finished error !!!",
-        decompressor.finished());
-    Assert.assertArrayEquals(bytes, decompressed);
+    assertTrue(decompressor.finished(),
+        "testSnappyCompressDecompress finished error !!!");
+    assertArrayEquals(bytes, decompressed);
     compressor.reset();
     decompressor.reset();
-    assertEquals("decompressor getRemaining error !!!",
-        0, decompressor.getRemaining());
+    assertEquals(0, decompressor.getRemaining(),
+        "decompressor getRemaining error !!!");
   }
 
   @Test
@@ -223,7 +221,7 @@ public class TestSnappyCompressorDecompressor {
 
       // check compressed output
       buf = bytesOut.toByteArray();
-      assertEquals("empty stream compressed output size != 4", 4, buf.length);
+      assertEquals(4, buf.length, "empty stream compressed output size != 4");
 
       // use compressed output as input for decompression
       bytesIn = new ByteArrayInputStream(buf);
@@ -233,7 +231,7 @@ public class TestSnappyCompressorDecompressor {
           new SnappyDecompressor(), 1024);
 
       // no byte is available because stream was closed
-      assertEquals("return value is not -1", -1, blockDecompressorStream.read());
+      assertEquals(-1, blockDecompressorStream.read(), "return value is not -1");
     } catch (Exception e) {
       fail("testCompressorDecompressorEmptyStreamLogic ex error !!!"
           + e.getMessage());
@@ -276,8 +274,8 @@ public class TestSnappyCompressorDecompressor {
           len -= bufLen;
         } while (len > 0);
       }
-      assertTrue("testSnappyBlockCompression error !!!",
-          out.toByteArray().length > 0);
+      assertTrue(out.toByteArray().length > 0,
+          "testSnappyBlockCompression error !!!");
     } catch (Exception ex) {
       fail("testSnappyBlockCompression ex error !!!");
     }
@@ -397,9 +395,8 @@ public class TestSnappyCompressorDecompressor {
       byte[] result = new byte[BYTE_SIZE];
       inflateIn.read(result);
 
-      Assert.assertArrayEquals(
-          "original array not equals compress/decompressed array", result,
-          bytes);
+      assertArrayEquals(bytes, result,
+          "original array not equals compress/decompressed array");
     } catch (IOException e) {
       fail("testSnappyCompressorDecopressorLogicWithCompressionStreams ex error !!!");
     } finally {

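One detail worth keeping in mind while reordering assertArrayEquals calls: in both JUnit 4 and JUnit 5 the expected array comes first and the actual array second; only the message moves from the front to the end. A small sketch under that signature (names and data are illustrative):

import static org.junit.jupiter.api.Assertions.assertArrayEquals;

import org.junit.jupiter.api.Test;

public class ArrayOrderExample {
  @Test
  public void testExpectedArrayComesFirst() {
    byte[] original = {1, 2, 3};     // the expected round-trip result
    byte[] roundTripped = {1, 2, 3}; // stand-in for decompressed output
    // Signature: assertArrayEquals(expected, actual, message)
    assertArrayEquals(original, roundTripped,
        "original array not equals compress/decompressed array");
  }
}
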
+ 43 - 42
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java

@@ -17,8 +17,12 @@
  */
 package org.apache.hadoop.io.compress.zlib;
 
-import static org.junit.Assert.*;
-import static org.junit.Assume.*;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -39,15 +43,15 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.io.compress.zlib.ZlibDecompressor.ZlibDirectDecompressor;
 import org.apache.hadoop.test.MultithreadedTestUtil;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
 
 public class TestZlibCompressorDecompressor {
 
   private static final Random random = new Random(12345L);
 
-  @Before
+  @BeforeEach
   public void before() {
     assumeTrue(ZlibFactory.isNativeZlibLoaded(new Configuration()));
   }  
@@ -115,8 +119,8 @@ public class TestZlibCompressorDecompressor {
         fail("testZlibCompressorDecompressorWithConfiguration ex error " + ex);
       }
     } else {
-      assertTrue("ZlibFactory is using native libs against request",
-          ZlibFactory.isNativeZlibLoaded(conf));
+      assertTrue(ZlibFactory.isNativeZlibLoaded(conf),
+          "ZlibFactory is using native libs against request");
     }
   }
 
@@ -140,8 +144,8 @@ public class TestZlibCompressorDecompressor {
         fail("testZlibCompressorDecompressorWithConfiguration ex error " + ex);
       }
     } else {
-      assertTrue("ZlibFactory is using native libs against request",
-              ZlibFactory.isNativeZlibLoaded(conf));
+      assertTrue(ZlibFactory.isNativeZlibLoaded(conf),
+          "ZlibFactory is using native libs against request");
     }
   }
 
@@ -154,33 +158,31 @@ public class TestZlibCompressorDecompressor {
     try {
       ZlibCompressor compressor = new ZlibCompressor();
       ZlibDecompressor decompressor = new ZlibDecompressor();
-      assertFalse("testZlibCompressDecompress finished error",
-          compressor.finished());
+      assertFalse(compressor.finished(),
+          "testZlibCompressDecompress finished error");
       compressor.setInput(rawData, 0, rawData.length);
-      assertTrue("testZlibCompressDecompress getBytesRead before error",
-          compressor.getBytesRead() == 0);
+      assertTrue(compressor.getBytesRead() == 0,
+          "testZlibCompressDecompress getBytesRead before error");
       compressor.finish();
 
       byte[] compressedResult = new byte[rawDataSize];
       int cSize = compressor.compress(compressedResult, 0, rawDataSize);
-      assertTrue("testZlibCompressDecompress getBytesRead ather error",
-          compressor.getBytesRead() == rawDataSize);
-      assertTrue(
-          "testZlibCompressDecompress compressed size no less then original size",
-          cSize < rawDataSize);
+      assertTrue(compressor.getBytesRead() == rawDataSize,
+          "testZlibCompressDecompress getBytesRead after error");
+      assertTrue(cSize < rawDataSize,
+          "testZlibCompressDecompress compressed size not less than original size");
       decompressor.setInput(compressedResult, 0, cSize);
       byte[] decompressedBytes = new byte[rawDataSize];
       decompressor.decompress(decompressedBytes, 0, decompressedBytes.length);
-      assertArrayEquals("testZlibCompressDecompress arrays not equals ",
-          rawData, decompressedBytes);
+      assertArrayEquals(rawData, decompressedBytes,
+          "testZlibCompressDecompress arrays not equals ");
       compressor.reset();
       decompressor.reset();
     } catch (IOException ex) {
       fail("testZlibCompressDecompress ex !!!" + ex);
     }
   }
-  
-  
+
   private void compressDecompressLoop(int rawDataSize) throws IOException {
     byte[] rawData = null;
     rawData = generate(rawDataSize);
@@ -247,8 +249,8 @@ public class TestZlibCompressorDecompressor {
       checkSetDictionaryArrayIndexOutOfBoundsException(zlibDecompressor);
       checkSetDictionaryArrayIndexOutOfBoundsException(zlibCompressor);
     } else {
-      assertTrue("ZlibFactory is using native libs against request",
-          ZlibFactory.isNativeZlibLoaded(conf));
+      assertTrue(ZlibFactory.isNativeZlibLoaded(conf),
+          "ZlibFactory is using native libs against request");
     }
   }
 
@@ -256,22 +258,22 @@ public class TestZlibCompressorDecompressor {
   public void testZlibFactory() {
     Configuration cfg = new Configuration();
 
-    assertTrue("testZlibFactory compression level error !!!",
-        CompressionLevel.DEFAULT_COMPRESSION == ZlibFactory
-            .getCompressionLevel(cfg));
+    assertTrue(CompressionLevel.DEFAULT_COMPRESSION == ZlibFactory
+        .getCompressionLevel(cfg),
+        "testZlibFactory compression level error !!!");
 
-    assertTrue("testZlibFactory compression strategy error !!!",
-        CompressionStrategy.DEFAULT_STRATEGY == ZlibFactory
-            .getCompressionStrategy(cfg));
+    assertTrue(CompressionStrategy.DEFAULT_STRATEGY == ZlibFactory
+        .getCompressionStrategy(cfg),
+        "testZlibFactory compression strategy error !!!");
 
     ZlibFactory.setCompressionLevel(cfg, CompressionLevel.BEST_COMPRESSION);
-    assertTrue("testZlibFactory compression strategy error !!!",
-        CompressionLevel.BEST_COMPRESSION == ZlibFactory
-            .getCompressionLevel(cfg));
+    assertTrue(CompressionLevel.BEST_COMPRESSION == ZlibFactory
+        .getCompressionLevel(cfg),
+        "testZlibFactory compression level error !!!");
 
     ZlibFactory.setCompressionStrategy(cfg, CompressionStrategy.FILTERED);
-    assertTrue("testZlibFactory compression strategy error !!!",
-        CompressionStrategy.FILTERED == ZlibFactory.getCompressionStrategy(cfg));
+    assertTrue(CompressionStrategy.FILTERED == ZlibFactory.getCompressionStrategy(cfg),
+        "testZlibFactory compression strategy error !!!");
   }
   
 
@@ -344,9 +346,8 @@ public class TestZlibCompressorDecompressor {
     assertTrue(zlibDecompressor.getBytesRead() == cSize);
     zlibDecompressor.reset();
     assertTrue(zlibDecompressor.getRemaining() == 0);
-    assertArrayEquals(
-        "testZlibCompressorDecompressorWithConfiguration array equals error",
-        rawData, decompressedRawData);
+    assertArrayEquals(rawData, decompressedRawData,
+        "testZlibCompressorDecompressorWithConfiguration array equals error");
 
     return decompressedRawData;
   }
@@ -370,10 +371,10 @@ public class TestZlibCompressorDecompressor {
       fail("testBuiltInGzipDecompressorExceptions aioob error" + ex);
     }        
     
-    assertTrue("decompresser.getBytesRead error",
-        decompresser.getBytesRead() == 0);
-    assertTrue("decompresser.getRemaining error",
-        decompresser.getRemaining() == 0);
+    assertTrue(decompresser.getBytesRead() == 0,
+        "decompresser.getBytesRead error");
+    assertTrue(decompresser.getRemaining() == 0,
+        "decompresser.getRemaining error");
     decompresser.reset();
     decompresser.end();
 

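The zlib tests skip themselves when the native library is absent, and the migration swaps org.junit.Assume for org.junit.jupiter.api.Assumptions, which has the same skip-rather-than-fail semantics. A minimal sketch, assuming a placeholder system property where the real tests call ZlibFactory.isNativeZlibLoaded(conf):

import static org.junit.jupiter.api.Assumptions.assumeTrue;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

public class NativeGuardExample {
  @BeforeEach
  public void before() {
    // A failed assumption marks every test in the class as skipped,
    // not failed, matching JUnit 4's Assume.assumeTrue.
    assumeTrue(Boolean.getBoolean("native.lib.loaded"));
  }

  @Test
  public void testRunsOnlyWhenNativeLibsLoad() {
    // exercised only when the assumption above held
  }
}
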
+ 72 - 56
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java

@@ -28,9 +28,9 @@ import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.DecompressorStream;
 import org.apache.hadoop.io.compress.ZStandardCodec;
 import org.apache.hadoop.test.MultithreadedTestUtil;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
@@ -46,11 +46,12 @@ import java.util.Random;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 public class TestZStandardCompressorDecompressor {
   private final static char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
@@ -59,7 +60,7 @@ public class TestZStandardCompressorDecompressor {
   private static File compressedFile;
   private static File uncompressedFile;
 
-  @BeforeClass
+  @BeforeAll
   public static void beforeClass() throws Exception {
     CONFIGURATION.setInt(IO_FILE_BUFFER_SIZE_KEY, 1024 * 64);
     uncompressedFile = new File(TestZStandardCompressorDecompressor.class
@@ -68,7 +69,7 @@ public class TestZStandardCompressorDecompressor {
         .getResource("/zstd/test_file.txt.zst").toURI());
   }
 
-  @Before
+  @BeforeEach
   public void before() throws Exception {
     assumeTrue(ZStandardCodec.isNativeCodeLoaded());
   }
@@ -112,71 +113,87 @@ public class TestZStandardCompressorDecompressor {
     assertArrayEquals(bytes, byteArrayOutputStream.toByteArray());
   }
 
-  @Test(expected = NullPointerException.class)
+  @Test
   public void testCompressorSetInputNullPointerException() {
-    ZStandardCompressor compressor = new ZStandardCompressor();
-    compressor.setInput(null, 0, 10);
+    assertThrows(NullPointerException.class, () -> {
+      ZStandardCompressor compressor = new ZStandardCompressor();
+      compressor.setInput(null, 0, 10);
+    });
   }
 
   //test on NullPointerException in {@code decompressor.setInput()}
-  @Test(expected = NullPointerException.class)
+  @Test
   public void testDecompressorSetInputNullPointerException() {
-    ZStandardDecompressor decompressor =
-        new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
-    decompressor.setInput(null, 0, 10);
+    assertThrows(NullPointerException.class, () -> {
+      ZStandardDecompressor decompressor =
+          new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
+      decompressor.setInput(null, 0, 10);
+    });
   }
 
   //test on ArrayIndexOutOfBoundsException in {@code compressor.setInput()}
-  @Test(expected = ArrayIndexOutOfBoundsException.class)
+  @Test
   public void testCompressorSetInputAIOBException() {
-    ZStandardCompressor compressor = new ZStandardCompressor();
-    compressor.setInput(new byte[] {}, -5, 10);
+    assertThrows(ArrayIndexOutOfBoundsException.class, () -> {
+      ZStandardCompressor compressor = new ZStandardCompressor();
+      compressor.setInput(new byte[] {}, -5, 10);
+    });
   }
 
   //test on ArrayIndexOutOfBoundsException in {@code decompressor.setInput()}
-  @Test(expected = ArrayIndexOutOfBoundsException.class)
+  @Test
   public void testDecompressorSetInputAIOUBException() {
-    ZStandardDecompressor decompressor =
-        new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
-    decompressor.setInput(new byte[] {}, -5, 10);
+    assertThrows(ArrayIndexOutOfBoundsException.class, () -> {
+      ZStandardDecompressor decompressor =
+          new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
+      decompressor.setInput(new byte[] {}, -5, 10);
+    });
   }
 
   //test on NullPointerException in {@code compressor.compress()}
-  @Test(expected = NullPointerException.class)
+  @Test
   public void testCompressorCompressNullPointerException() throws Exception {
-    ZStandardCompressor compressor = new ZStandardCompressor();
-    byte[] bytes = generate(1024 * 6);
-    compressor.setInput(bytes, 0, bytes.length);
-    compressor.compress(null, 0, 0);
+    assertThrows(NullPointerException.class, () -> {
+      ZStandardCompressor compressor = new ZStandardCompressor();
+      byte[] bytes = generate(1024 * 6);
+      compressor.setInput(bytes, 0, bytes.length);
+      compressor.compress(null, 0, 0);
+    });
   }
 
   //test on NullPointerException in {@code decompressor.decompress()}
-  @Test(expected = NullPointerException.class)
+  @Test
   public void testDecompressorCompressNullPointerException() throws Exception {
-    ZStandardDecompressor decompressor =
-        new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
-    byte[] bytes = generate(1024 * 6);
-    decompressor.setInput(bytes, 0, bytes.length);
-    decompressor.decompress(null, 0, 0);
+    assertThrows(NullPointerException.class, () -> {
+      ZStandardDecompressor decompressor =
+          new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
+      byte[] bytes = generate(1024 * 6);
+      decompressor.setInput(bytes, 0, bytes.length);
+      decompressor.decompress(null, 0, 0);
+    });
   }
 
   //test on ArrayIndexOutOfBoundsException in {@code compressor.compress()}
-  @Test(expected = ArrayIndexOutOfBoundsException.class)
+  @Test
   public void testCompressorCompressAIOBException() throws Exception {
-    ZStandardCompressor compressor = new ZStandardCompressor();
-    byte[] bytes = generate(1024 * 6);
-    compressor.setInput(bytes, 0, bytes.length);
-    compressor.compress(new byte[] {}, 0, -1);
+    assertThrows(ArrayIndexOutOfBoundsException.class, () -> {
+      ZStandardCompressor compressor = new ZStandardCompressor();
+      byte[] bytes = generate(1024 * 6);
+      compressor.setInput(bytes, 0, bytes.length);
+      compressor.compress(new byte[] {}, 0, -1);
+    });
   }
 
   //test on ArrayIndexOutOfBoundsException in decompressor.decompress()
-  @Test(expected = ArrayIndexOutOfBoundsException.class)
+  @Test
   public void testDecompressorCompressAIOBException() throws Exception {
-    ZStandardDecompressor decompressor =
-        new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
-    byte[] bytes = generate(1024 * 6);
-    decompressor.setInput(bytes, 0, bytes.length);
-    decompressor.decompress(new byte[] {}, 0, -1);
+    assertThrows(ArrayIndexOutOfBoundsException.class, () -> {
+      ZStandardDecompressor decompressor =
+          new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT);
+      byte[] bytes = generate(1024 * 6);
+      decompressor.setInput(bytes, 0, bytes.length);
+      decompressor.decompress(new byte[] {}, 0, -1);
+    });
   }
 
   // test ZStandardCompressor compressor.compress()
@@ -186,7 +203,7 @@ public class TestZStandardCompressorDecompressor {
     int bytesSize = 1024 * 2056 + 1;
     ZStandardCompressor compressor = new ZStandardCompressor();
     byte[] bytes = generate(bytesSize);
-    assertTrue("needsInput error !!!", compressor.needsInput());
+    assertTrue(compressor.needsInput(), "needsInput error !!!");
     compressor.setInput(bytes, 0, bytes.length);
     byte[] emptyBytes = new byte[bytesSize];
     int cSize = compressor.compress(emptyBytes, 0, bytes.length);
@@ -224,8 +241,8 @@ public class TestZStandardCompressorDecompressor {
 
       byte[] result = new byte[byteSize];
       inflateIn.read(result);
-      assertArrayEquals("original array not equals compress/decompressed array",
-          result, bytes);
+      assertArrayEquals(bytes, result,
+          "original array not equals compress/decompressed array");
     } finally {
       IOUtils.closeStream(inflateIn);
     }
@@ -281,9 +298,8 @@ public class TestZStandardCompressorDecompressor {
 
       byte[] result = new byte[byteSize];
       inflateIn.read(result);
-      assertArrayEquals(
-              "original array not equals compress/decompressed array", bytes,
-              result);
+      assertArrayEquals(bytes, result,
+          "original array not equals compress/decompressed array");
     } finally {
       IOUtils.closeStream(deflateOut);
       IOUtils.closeStream(inflateIn);
@@ -383,16 +399,16 @@ public class TestZStandardCompressorDecompressor {
     ZStandardCompressor compressor = new ZStandardCompressor();
     ZStandardDecompressor decompressor = new ZStandardDecompressor(rawDataSize);
     assertTrue(compressor.needsInput());
-    assertFalse("testZStandardCompressDecompress finished error",
-        compressor.finished());
+    assertFalse(compressor.finished(),
+        "testZStandardCompressDecompress finished error");
     compressor.setInput(rawData, 0, rawData.length);
     compressor.finish();
 
     byte[] compressedResult = new byte[rawDataSize];
     int cSize = compressor.compress(compressedResult, 0, rawDataSize);
     assertEquals(rawDataSize, compressor.getBytesRead());
-    assertTrue("compressed size no less then original size",
-        cSize < rawDataSize);
+    assertTrue(cSize < rawDataSize,
+        "compressed size not less than original size");
     decompressor.setInput(compressedResult, 0, cSize);
     byte[] decompressedBytes = new byte[rawDataSize];
     decompressor.decompress(decompressedBytes, 0, decompressedBytes.length);

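The zstd hunks above replace @Test(expected = ...) with assertThrows, which narrows the expectation from the whole test method to just the statements inside the lambda, so it cannot be satisfied by an unrelated earlier failure. A standalone sketch of the pattern; the null dereference stands in for a call like compressor.setInput(null, 0, 10):

import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;

public class ExpectedExceptionExample {
  @Test
  public void testNullInputRejected() {
    // JUnit 4 put the expectation on the annotation:
    //   @Test(expected = NullPointerException.class)
    // JUnit 5 scopes it to the throwing statements:
    assertThrows(NullPointerException.class, () -> {
      byte[] input = null;
      input.clone(); // NPE: stand-in for a rejected null buffer
    });
  }
}
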
+ 20 - 20
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java

@@ -34,12 +34,12 @@ import org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.XORRawErasureCoderFactory;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 /**
  * Test the codec to raw coder mapping.
@@ -49,7 +49,7 @@ public class TestCodecRawCoderMapping {
   private static final int numDataUnit = 6;
   private static final int numParityUnit = 3;
 
-  @Before
+  @BeforeEach
   public void setup() {
     conf = new Configuration();
   }
@@ -64,20 +64,20 @@ public class TestCodecRawCoderMapping {
     RawErasureDecoder decoder = CodecUtil.createRawDecoder(
         conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions);
     if (ErasureCodeNative.isNativeCodeLoaded()) {
-      Assert.assertTrue(encoder instanceof NativeRSRawEncoder);
-      Assert.assertTrue(decoder instanceof NativeRSRawDecoder);
+      assertTrue(encoder instanceof NativeRSRawEncoder);
+      assertTrue(decoder instanceof NativeRSRawDecoder);
     } else {
-      Assert.assertTrue(encoder instanceof RSRawEncoder);
-      Assert.assertTrue(decoder instanceof RSRawDecoder);
+      assertTrue(encoder instanceof RSRawEncoder);
+      assertTrue(decoder instanceof RSRawDecoder);
     }
 
     // should return default raw coder of rs-legacy codec
     encoder = CodecUtil.createRawEncoder(conf,
         ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(encoder instanceof RSLegacyRawEncoder);
+    assertTrue(encoder instanceof RSLegacyRawEncoder);
     decoder = CodecUtil.createRawDecoder(conf,
         ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(decoder instanceof RSLegacyRawDecoder);
+    assertTrue(decoder instanceof RSLegacyRawDecoder);
   }
 
   @Test
@@ -92,7 +92,7 @@ public class TestCodecRawCoderMapping {
     try {
       CodecUtil.createRawEncoder(conf,
           ErasureCodeConstants.RS_CODEC_NAME, coderOptions);
-      Assert.fail();
+      fail();
     } catch (Exception e) {
       GenericTestUtils.assertExceptionContains(
           "Fail to create raw erasure encoder with given codec: rs", e);
@@ -104,7 +104,7 @@ public class TestCodecRawCoderMapping {
     try {
       CodecUtil.createRawEncoder(conf,
           ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-      Assert.fail();
+      fail();
     } catch (Exception e) {
       GenericTestUtils.assertExceptionContains(
           "Fail to create raw erasure encoder with given codec: rs", e);
@@ -121,10 +121,10 @@ public class TestCodecRawCoderMapping {
     // should return default raw coder of rs codec
     RawErasureEncoder encoder = CodecUtil.createRawEncoder(
             conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions);
-    Assert.assertTrue(encoder instanceof RSRawEncoder);
+    assertTrue(encoder instanceof RSRawEncoder);
     RawErasureDecoder decoder = CodecUtil.createRawDecoder(
             conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions);
-    Assert.assertTrue(decoder instanceof RSRawDecoder);
+    assertTrue(decoder instanceof RSRawDecoder);
   }
 
   @Test
@@ -134,10 +134,10 @@ public class TestCodecRawCoderMapping {
     // should return default raw coder of rs-legacy codec
     RawErasureEncoder encoder = CodecUtil.createRawEncoder(
             conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(encoder instanceof RSLegacyRawEncoder);
+    assertTrue(encoder instanceof RSLegacyRawEncoder);
     RawErasureDecoder decoder = CodecUtil.createRawDecoder(
             conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions);
-    Assert.assertTrue(decoder instanceof RSLegacyRawDecoder);
+    assertTrue(decoder instanceof RSLegacyRawDecoder);
   }
 
   @Test
@@ -149,10 +149,10 @@ public class TestCodecRawCoderMapping {
     // should return second coder specified by IO_ERASURECODE_CODEC_CODERS
     RawErasureEncoder encoder = CodecUtil.createRawEncoder(
             conf, ErasureCodeConstants.XOR_CODEC_NAME, coderOptions);
-    Assert.assertTrue(encoder instanceof XORRawEncoder);
+    assertTrue(encoder instanceof XORRawEncoder);
     RawErasureDecoder decoder = CodecUtil.createRawDecoder(
             conf, ErasureCodeConstants.XOR_CODEC_NAME, coderOptions);
-    Assert.assertTrue(decoder instanceof XORRawDecoder);
+    assertTrue(decoder instanceof XORRawDecoder);
   }
 
   @Test

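This file keeps its assertTrue(x instanceof Y) checks, only dropping the Assert. prefix. JUnit 5.8 and later also provide assertInstanceOf, which reports the unexpected runtime type on failure; that is a possible follow-up rather than anything the patch does, sketched here with a stand-in object in place of a real raw coder:

import static org.junit.jupiter.api.Assertions.assertInstanceOf;

import org.junit.jupiter.api.Test;

public class InstanceOfExample {
  @Test
  public void testFailureReportsActualType() {
    Object coder = new StringBuilder(); // stand-in for a created raw coder
    // Unlike assertTrue(coder instanceof CharSequence), a failure here
    // prints the actual type in the assertion message.
    assertInstanceOf(CharSequence.class, coder);
  }
}
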
+ 4 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRegistry.java

@@ -25,15 +25,15 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.XORRawErasureCoderFactory;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * Test CodecRegistry.

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java

@@ -26,7 +26,7 @@ import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Random;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * Test base of common utilities for tests not only raw coders but also block
@@ -159,7 +159,7 @@ public abstract class TestCoderBase {
     byte[][] recovered = toArrays(recoveredChunks);
     boolean result = Arrays.deepEquals(erased, recovered);
     if (!result) {
-      assertTrue("Decoding and comparing failed.", result);
+      assertTrue(result, "Decoding and comparing failed.");
     }
   }
 

+ 6 - 10
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestECSchema.java

@@ -17,23 +17,19 @@
  */
 package org.apache.hadoop.io.erasurecode;
 
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
 
 import java.util.HashMap;
 import java.util.Map;
 import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
 
+@Timeout(300)
 public class TestECSchema {
 
-  @Rule
-  public Timeout globalTimeout = new Timeout(300000, TimeUnit.MILLISECONDS);
-
   @Test
   public void testGoodSchema() {
     int numDataUnits = 6;
@@ -60,7 +56,7 @@ public class TestECSchema {
     extraMap.put(extraOption, extraOptionValue);
     ECSchema sameSchema = new ECSchema(codec, numDataUnits, numParityUnits,
         extraMap);
-    assertEquals("Different constructors not equal", sameSchema, schema);
+    assertEquals(sameSchema, schema, "Different constructors not equal");
   }
 
   @Test

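TestECSchema trades the JUnit 4 @Rule Timeout field for a class-level @Timeout annotation. The annotation's unit defaults to seconds, so @Timeout(300) preserves the old 300000 ms budget for every test in the class. A minimal sketch with the unit spelled out:

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

// Replaces: @Rule public Timeout globalTimeout =
//               new Timeout(300000, TimeUnit.MILLISECONDS);
@Timeout(value = 300, unit = TimeUnit.SECONDS)
public class ClassTimeoutExample {
  @Test
  public void testFinishesWithinBudget() {
    // fails automatically if it runs longer than 300 seconds
  }
}
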
+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestErasureCodingEncodeAndDecode.java

@@ -21,11 +21,11 @@ package org.apache.hadoop.io.erasurecode;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.Random;
 
-import static org.junit.Assert.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 
 public class TestErasureCodingEncodeAndDecode {
 

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/codec/TestHHXORErasureCodec.java

@@ -21,9 +21,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
 import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestHHXORErasureCodec {
   private ECSchema schema = new ECSchema("hhxor", 10, 4);

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java

@@ -26,7 +26,7 @@ import org.apache.hadoop.io.erasurecode.TestCoderBase;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
 
 /**
  * Erasure coder test base with utilities.

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java

@@ -22,7 +22,7 @@ import org.apache.hadoop.io.erasurecode.ECChunk;
 
 import java.io.IOException;
 
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
 
 
 /**

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java

@@ -20,12 +20,12 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class TestHHXORErasureCoder extends TestHHErasureCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
     this.encoderClass = HHXORErasureEncoder.class;
     this.decoderClass = HHXORErasureDecoder.class;

+ 5 - 9
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java

@@ -20,21 +20,17 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
-
-import java.util.concurrent.TimeUnit;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 /**
  * Test Reed-Solomon encoding and decoding.
  */
+@Timeout(300)
 public class TestRSErasureCoder extends TestErasureCoderBase {
-  @Rule
-  public Timeout globalTimeout = new Timeout(300000, TimeUnit.MILLISECONDS);
 
-  @Before
+  @BeforeEach
   public void setup() {
     this.encoderClass = RSErasureEncoder.class;
     this.decoderClass = RSErasureDecoder.class;

+ 5 - 10
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java

@@ -17,22 +17,17 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;
 
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
-
-import java.util.concurrent.TimeUnit;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 /**
  * Test XOR encoding and decoding.
  */
+@Timeout(300)
 public class TestXORCoder extends TestErasureCoderBase {
 
-  @Rule
-  public Timeout globalTimeout = new Timeout(300000, TimeUnit.MILLISECONDS);
-
-  @Before
+  @BeforeEach
   public void setup() {
     this.encoderClass = XORErasureEncoder.class;
     this.decoderClass = XORErasureDecoder.class;

+ 8 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestCoderUtil.java

@@ -19,11 +19,12 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.nio.ByteBuffer;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 /**
  * Test of the utility of raw erasure coder.
@@ -116,9 +117,11 @@ public class TestCoderUtil {
     assertEquals(firstValidInput, inputs[8]);
   }
 
-  @Test(expected = HadoopIllegalArgumentException.class)
+  @Test
   public void testNoValidInput() {
-    byte[][] inputs = new byte[numInputs][];
-    CoderUtil.findFirstValidInput(inputs);
+    assertThrows(HadoopIllegalArgumentException.class, () -> {
+      byte[][] inputs = new byte[numInputs][];
+      CoderUtil.findFirstValidInput(inputs);
+    });
   }
 }

+ 57 - 34
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDecodingValidator.java

@@ -20,12 +20,9 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Assert;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -34,17 +31,19 @@ import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 /**
  * Test {@link DecodingValidator} under various decoders.
  */
-@RunWith(Parameterized.class)
 public class TestDecodingValidator extends TestRawCoderBase {
 
   private DecodingValidator validator;
 
-  @Parameterized.Parameters
   public static Collection<Object[]> data() {
     return Arrays.asList(new Object[][] {
         {RSRawErasureCoderFactory.class, 6, 3, new int[]{1}, new int[]{}},
@@ -57,7 +56,7 @@ public class TestDecodingValidator extends TestRawCoderBase {
     });
   }
 
-  public TestDecodingValidator(
+  public void initTestDecodingValidator(
       Class<? extends RawErasureCoderFactory> factoryClass, int numDataUnits,
       int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes) {
     this.encoderFactoryClass = factoryClass;
@@ -66,13 +65,13 @@ public class TestDecodingValidator extends TestRawCoderBase {
     this.numParityUnits = numParityUnits;
     this.erasedDataIndexes = erasedDataIndexes;
     this.erasedParityIndexes = erasedParityIndexes;
+    setup();
   }
 
-  @Before
   public void setup() {
     if (encoderFactoryClass == NativeRSRawErasureCoderFactory.class
         || encoderFactoryClass == NativeXORRawErasureCoderFactory.class) {
-      Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+      assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
     }
     setAllowDump(false);
   }
@@ -80,8 +79,12 @@ public class TestDecodingValidator extends TestRawCoderBase {
   /**
    * Test if the same validator can process direct and non-direct buffers.
    */
-  @Test
-  public void testValidate() {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testValidate(Class<? extends RawErasureCoderFactory> factoryClass,
+      int numDataUnits, int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes) {
+    initTestDecodingValidator(factoryClass, numDataUnits, numParityUnits,
+        erasedDataIndexes, erasedParityIndexes);
     prepare(null, numDataUnits, numParityUnits, erasedDataIndexes,
         erasedParityIndexes);
     testValidate(true);
@@ -119,7 +122,7 @@ public class TestDecodingValidator extends TestRawCoderBase {
     try {
       encoder.encode(dataChunks, parityChunks);
     } catch (Exception e) {
-      Assert.fail("Should not get Exception: " + e.getMessage());
+      fail("Should not get Exception: " + e.getMessage());
     }
 
     // decode
@@ -133,7 +136,7 @@ public class TestDecodingValidator extends TestRawCoderBase {
     try {
       decoder.decode(inputChunks, erasedIndexes, recoveredChunks);
     } catch (Exception e) {
-      Assert.fail("Should not get Exception: " + e.getMessage());
+      fail("Should not get Exception: " + e.getMessage());
     }
 
     // validate
@@ -146,7 +149,7 @@ public class TestDecodingValidator extends TestRawCoderBase {
       validator.validate(clonedInputChunks, clonedErasedIndexes,
           clonedRecoveredChunks);
     } catch (Exception e) {
-      Assert.fail("Should not get Exception: " + e.getMessage());
+      fail("Should not get Exception: " + e.getMessage());
     }
 
     // Check if input buffers' positions are moved to the end
@@ -154,8 +157,8 @@ public class TestDecodingValidator extends TestRawCoderBase {
 
     // Check if validator does not change recovered chunks and erased indexes
     verifyChunksEqual(recoveredChunks, clonedRecoveredChunks);
-    Assert.assertArrayEquals("Erased indexes should not be changed",
-        erasedIndexes, clonedErasedIndexes);
+    assertArrayEquals(erasedIndexes, clonedErasedIndexes,
+        "Erased indexes should not be changed");
 
     // Check if validator uses correct indexes for validation
     List<Integer> validIndexesList =
@@ -167,31 +170,33 @@ public class TestDecodingValidator extends TestRawCoderBase {
     List<Integer> erasedIndexesList =
         IntStream.of(erasedIndexes).boxed().collect(Collectors.toList());
     int newErasedIndex = validator.getNewErasedIndex();
-    Assert.assertTrue(
+    assertTrue(newValidIndexesList.containsAll(erasedIndexesList),
         "Valid indexes for validation should contain"
-        + " erased indexes for decoding",
-        newValidIndexesList.containsAll(erasedIndexesList));
-    Assert.assertTrue(
+        + " erased indexes for decoding");
+    assertTrue(validIndexesList.contains(newErasedIndex),
         "An erased index for validation should be contained"
-        + " in valid indexes for decoding",
-        validIndexesList.contains(newErasedIndex));
-    Assert.assertFalse(
+        + " in valid indexes for decoding");
+    assertFalse(newValidIndexesList.contains(newErasedIndex),
         "An erased index for validation should not be contained"
-        + " in valid indexes for validation",
-        newValidIndexesList.contains(newErasedIndex));
+        + " in valid indexes for validation");
   }
 
   private void verifyChunksEqual(ECChunk[] chunks1, ECChunk[] chunks2) {
     boolean result = Arrays.deepEquals(toArrays(chunks1), toArrays(chunks2));
-    assertTrue("Recovered chunks should not be changed", result);
+    assertTrue(result, "Recovered chunks should not be changed");
   }
 
   /**
    * Test if validator throws {@link InvalidDecodingException} when
    * a decoded output buffer is polluted.
    */
-  @Test
-  public void testValidateWithBadDecoding() throws IOException {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testValidateWithBadDecoding(Class<? extends RawErasureCoderFactory> factoryClass,
+      int numDataUnits, int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes)
+      throws IOException {
+    initTestDecodingValidator(factoryClass, numDataUnits, numParityUnits,
+        erasedDataIndexes, erasedParityIndexes);
     prepare(null, numDataUnits, numParityUnits, erasedDataIndexes,
         erasedParityIndexes);
     this.usingDirectBuffer = true;
@@ -206,7 +211,7 @@ public class TestDecodingValidator extends TestRawCoderBase {
     try {
       encoder.encode(dataChunks, parityChunks);
     } catch (Exception e) {
-      Assert.fail("Should not get Exception: " + e.getMessage());
+      fail("Should not get Exception: " + e.getMessage());
     }
 
     // decode
@@ -220,7 +225,7 @@ public class TestDecodingValidator extends TestRawCoderBase {
     try {
       decoder.decode(inputChunks, erasedIndexes, recoveredChunks);
     } catch (Exception e) {
-      Assert.fail("Should not get Exception: " + e.getMessage());
+      fail("Should not get Exception: " + e.getMessage());
     }
 
     // validate
@@ -228,10 +233,28 @@ public class TestDecodingValidator extends TestRawCoderBase {
     polluteSomeChunk(recoveredChunks);
     try {
       validator.validate(inputChunks, erasedIndexes, recoveredChunks);
-      Assert.fail("Validation should fail due to bad decoding");
+      fail("Validation should fail due to bad decoding");
     } catch (InvalidDecodingException e) {
       String expected = "Failed to validate decoding";
       GenericTestUtils.assertExceptionContains(expected, e);
     }
   }
+
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testIdempotentReleases(Class<? extends RawErasureCoderFactory> factoryClass,
+      int numDataUnits, int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes) {
+    initTestDecodingValidator(factoryClass, numDataUnits, numParityUnits,
+        erasedDataIndexes, erasedParityIndexes);
+    prepareCoders(true);
+
+    for (int i = 0; i < 3; i++) {
+      encoder.release();
+      decoder.release();
+    }
+  }
+
+  @Test
+  public void testIdempotentReleases() {
+  }
 }

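TestDecodingValidator above shows the heaviest pattern in this migration: @RunWith(Parameterized.class) with constructor injection becomes @ParameterizedTest plus @MethodSource, and the old constructor survives as an init method that every test invokes explicitly. A reduced sketch of that shape (class name, field, and data are illustrative):

import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Arrays;
import java.util.Collection;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

public class ParameterizedMigrationExample {
  private int units;

  // Was the JUnit 4 @Parameterized.Parameters factory.
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][] {{6}, {3}});
  }

  // Was the JUnit 4 constructor; now called explicitly per test.
  private void init(int units) {
    this.units = units;
  }

  @ParameterizedTest
  @MethodSource("data")
  public void testUnitsArePositive(int units) {
    init(units);
    assertTrue(this.units > 0, "units should be positive");
  }
}
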
+ 7 - 6
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java

@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assertions.fail;
+
 import org.apache.hadoop.io.erasurecode.ECChunk;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -29,7 +30,7 @@ import java.nio.ByteBuffer;
  * Test dummy raw coder.
  */
 public class TestDummyRawCoder extends TestRawCoderBase {
-  @Before
+  @BeforeEach
   public void setup() {
     encoderFactoryClass = DummyRawErasureCoderFactory.class;
     decoderFactoryClass = DummyRawErasureCoderFactory.class;
@@ -64,7 +65,7 @@ public class TestDummyRawCoder extends TestRawCoderBase {
     try {
       encoder.encode(dataChunks, parityChunks);
     } catch (IOException e) {
-      Assert.fail("Unexpected IOException: " + e.getMessage());
+      fail("Unexpected IOException: " + e.getMessage());
     }
     compareAndVerify(parityChunks, getEmptyChunks(parityChunks.length));
 
@@ -79,7 +80,7 @@ public class TestDummyRawCoder extends TestRawCoderBase {
       decoder.decode(inputChunks, getErasedIndexesForDecoding(),
           recoveredChunks);
     } catch (IOException e) {
-      Assert.fail("Unexpected IOException: " + e.getMessage());
+      fail("Unexpected IOException: " + e.getMessage());
     }
     compareAndVerify(recoveredChunks, getEmptyChunks(recoveredChunks.length));
   }

+ 6 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java

@@ -17,19 +17,20 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * Test native raw Reed-solomon encoding and decoding.
  */
 public class TestNativeRSRawCoder extends TestRSRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
-    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
     this.encoderFactoryClass = NativeRSRawErasureCoderFactory.class;
     this.decoderFactoryClass = NativeRSRawErasureCoderFactory.class;
     setAllowDump(true);

+ 6 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java

@@ -17,19 +17,20 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * Test NativeXOR encoding and decoding.
  */
 public class TestNativeXORRawCoder extends TestXORRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
-    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
     this.encoderFactoryClass = NativeXORRawErasureCoderFactory.class;
     this.decoderFactoryClass = NativeXORRawErasureCoderFactory.class;
     setAllowDump(true);

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java

@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test the legacy raw Reed-solomon coder implemented in Java.
  */
 public class TestRSLegacyRawCoder extends TestRSRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
     this.encoderFactoryClass = RSLegacyRawErasureCoderFactory.class;
     this.decoderFactoryClass = RSLegacyRawErasureCoderFactory.class;

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java

@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test the new raw Reed-solomon coder implemented in Java.
  */
 public class TestRSRawCoder extends TestRSRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
     this.encoderFactoryClass = RSRawErasureCoderFactory.class;
     this.decoderFactoryClass = RSRawErasureCoderFactory.class;

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 /**
  * Test base for raw Reed-solomon coders.

+ 5 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java

@@ -17,18 +17,19 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
-import org.junit.Assume;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test raw Reed-solomon coder implemented in Java.
  */
 public class TestRSRawCoderInteroperable1 extends TestRSRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
-    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
 
     this.encoderFactoryClass = RSRawErasureCoderFactory.class;
     this.decoderFactoryClass = NativeRSRawErasureCoderFactory.class;

+ 5 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java

@@ -17,18 +17,19 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
-import org.junit.Assume;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test raw Reed-solomon coder implemented in Java.
  */
 public class TestRSRawCoderInteroperable2 extends TestRSRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
-    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
 
     this.encoderFactoryClass = NativeRSRawErasureCoderFactory.class;
     this.decoderFactoryClass = RSRawErasureCoderFactory.class;

+ 13 - 11
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java

@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+
 import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.TestCoderBase;
 import org.apache.hadoop.test.LambdaTestUtils;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 
@@ -85,7 +87,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
 
     try {
       performTestCoding(baseChunkSize, false, true, false, true);
-      Assert.fail("Encoding test with bad input should fail");
+      fail("Encoding test with bad input should fail");
     } catch (Exception e) {
       // Expected
     }
@@ -101,7 +103,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
 
     try {
       performTestCoding(baseChunkSize, false, false, true, true);
-      Assert.fail("Decoding test with bad output should fail");
+      fail("Decoding test with bad output should fail");
     } catch (Exception e) {
       // Expected
     }
@@ -133,14 +135,14 @@ public abstract class TestRawCoderBase extends TestCoderBase {
   public void testCodingWithErasingTooMany() {
     try {
       testCoding(true);
-      Assert.fail("Decoding test erasing too many should fail");
+      fail("Decoding test erasing too many should fail");
     } catch (Exception e) {
       // Expected
     }
 
     try {
       testCoding(false);
-      Assert.fail("Decoding test erasing too many should fail");
+      fail("Decoding test erasing too many should fail");
     } catch (Exception e) {
       // Expected
     }
@@ -182,7 +184,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
     try {
       encoder.encode(dataChunks, parityChunks);
     } catch (IOException e) {
-      Assert.fail("Should not get IOException: " + e.getMessage());
+      fail("Should not get IOException: " + e.getMessage());
     }
     dumpChunks("Encoded parity chunks", parityChunks);
 
@@ -217,7 +219,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
       decoder.decode(inputChunks, getErasedIndexesForDecoding(),
           recoveredChunks);
     } catch (IOException e) {
-      Assert.fail("Should not get IOException: " + e.getMessage());
+      fail("Should not get IOException: " + e.getMessage());
     }
     dumpChunks("Decoded/recovered chunks", recoveredChunks);
 
@@ -315,7 +317,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
     try {
       encoder.encode(dataChunks, parityChunks);
     } catch (IOException e) {
-      Assert.fail("Should not get IOException: " + e.getMessage());
+      fail("Should not get IOException: " + e.getMessage());
     }
     verifyBufferPositionAtEnd(dataChunks);
 
@@ -329,7 +331,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
       decoder.decode(inputChunks, getErasedIndexesForDecoding(),
           recoveredChunks);
     } catch (IOException e) {
-      Assert.fail("Should not get IOException: " + e.getMessage());
+      fail("Should not get IOException: " + e.getMessage());
     }
     verifyBufferPositionAtEnd(inputChunks);
   }
@@ -337,7 +339,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
   void verifyBufferPositionAtEnd(ECChunk[] inputChunks) {
     for (ECChunk chunk : inputChunks) {
       if (chunk != null) {
-        Assert.assertEquals(0, chunk.getBuffer().remaining());
+        assertEquals(0, chunk.getBuffer().remaining());
       }
     }
   }
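
The migration above keeps the JUnit 4-era try/fail(...)/catch idiom and only swaps the imports. A possible further cleanup, not part of this commit, is JUnit 5's assertThrows, which asserts the exception and drops the boilerplate; a minimal sketch reusing the existing performTestCoding helper from this class:

    import static org.junit.jupiter.api.Assertions.assertThrows;

    @Test
    public void testCodingWithBadInput() {
      // assertThrows runs the lambda and fails the test with the given
      // message unless an Exception (or a subtype) is thrown.
      assertThrows(Exception.class,
          () -> performTestCoding(baseChunkSize, false, true, false, true),
          "Encoding test with bad input should fail");
    }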

+ 4 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawErasureCoderBenchmark.java

@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
-import org.junit.Assume;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 /**
  * Tests for the raw erasure coder benchmark tool.
@@ -55,7 +56,7 @@ public class TestRawErasureCoderBenchmark {
 
   @Test
   public void testISALCoder() throws Exception {
-    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
     // ISA-L coder
     RawErasureCoderBenchmark.performBench("encode",
         RawErasureCoderBenchmark.CODER.ISAL_CODER, 5, 300, 64);
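
The Assume -> Assumptions swap is behaviour-preserving: a failed assumption aborts (skips) the test rather than failing it. Only the exception type changes, from JUnit 4's org.junit.AssumptionViolatedException to org.opentest4j.TestAbortedException, which matters solely to code that catches it. Jupiter also offers an overload with a reason string that shows up in the report when the test is aborted; a sketch with a hypothetical message:

    import static org.junit.jupiter.api.Assumptions.assumeTrue;

    // Aborts, rather than fails, when the native library is absent.
    assumeTrue(ErasureCodeNative.isNativeCodeLoaded(),
        "ISA-L native library not loaded; skipping native coder test");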

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java

@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test pure Java XOR encoding and decoding.
  */
 public class TestXORRawCoder extends TestXORRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
     this.encoderFactoryClass = XORRawErasureCoderFactory.class;
     this.decoderFactoryClass = XORRawErasureCoderFactory.class;

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderBase.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 /**
  * Test base for raw XOR coders.

+ 5 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java

@@ -17,18 +17,19 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
-import org.junit.Assume;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test raw XOR coder implemented in Java.
  */
 public class TestXORRawCoderInteroperable1 extends TestXORRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
-    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
     this.encoderFactoryClass = XORRawErasureCoderFactory.class;
     this.decoderFactoryClass = NativeXORRawErasureCoderFactory.class;
     setAllowDump(true);

+ 5 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java

@@ -17,18 +17,19 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
-import org.junit.Assume;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test raw XOR coder implemented in Java.
  */
 public class TestXORRawCoderInteroperable2 extends TestXORRawCoderBase {
 
-  @Before
+  @BeforeEach
   public void setup() {
-    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
     this.encoderFactoryClass = NativeXORRawErasureCoderFactory.class;
     this.decoderFactoryClass = XORRawErasureCoderFactory.class;
     setAllowDump(true);

+ 6 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestCompression.java

@@ -18,20 +18,22 @@
 package org.apache.hadoop.io.file.tfile;
 
 import org.apache.hadoop.test.LambdaTestUtils;
-import org.junit.*;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestCompression {
 
-  @BeforeClass
+  @BeforeAll
   public static void resetConfigBeforeAll() {
     Compression.Algorithm.LZO.conf.setBoolean("test.reload.lzo.codec", true);
   }
 
-  @AfterClass
+  @AfterAll
   public static void resetConfigAfterAll() {
     Compression.Algorithm.LZO.conf.setBoolean("test.reload.lzo.codec", false);
   }
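
The annotation mapping applied throughout these files is mechanical (@Before -> @BeforeEach, @After -> @AfterEach, @BeforeClass -> @BeforeAll, @AfterClass -> @AfterAll), but note that @BeforeAll/@AfterAll methods must still be static, exactly like their JUnit 4 counterparts; Jupiter relaxes this only under @TestInstance(Lifecycle.PER_CLASS). A minimal skeleton with a hypothetical class name:

    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    class LifecycleSkeleton {          // hypothetical example class
      @BeforeAll
      static void initAll() {}         // was @BeforeClass; still static
      @BeforeEach
      void init() {}                   // was @Before
      @Test
      void test() {}                   // Jupiter no longer requires public
      @AfterEach
      void tearDown() {}               // was @After
      @AfterAll
      static void tearDownAll() {}     // was @AfterClass; still static
    }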

+ 42 - 41
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java

@@ -32,12 +32,12 @@ import org.apache.hadoop.io.file.tfile.TFile.Reader;
 import org.apache.hadoop.io.file.tfile.TFile.Writer;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 /**
  * test tfile features.
@@ -51,13 +51,13 @@ public class TestTFile {
   private static final int largeVal = 3 * 1024 * 1024;
   private static final String localFormatter = "%010d";
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     conf = new Configuration();
     fs = FileSystem.get(conf);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     // do nothing
   }
@@ -109,18 +109,19 @@ public class TestTFile {
       byte[] val = readValue(scanner);
       String keyStr = String.format(localFormatter, i);
       String valStr = value + keyStr;
-      assertTrue("bytes for keys do not match " + keyStr + " "
-          + new String(key), Arrays.equals(keyStr.getBytes(), key));
-      assertTrue("bytes for vals do not match " + valStr + " "
-          + new String(val), Arrays.equals(
-          valStr.getBytes(), val));
+      assertTrue(Arrays.equals(keyStr.getBytes(), key),
+          "bytes for keys do not match " + keyStr + " "
+          + new String(key));
+      assertTrue(Arrays.equals(
+          valStr.getBytes(), val), "bytes for vals do not match " + valStr + " "
+          + new String(val));
       assertTrue(scanner.advance());
       key = readKey(scanner);
       val = readValue(scanner);
-      assertTrue("bytes for keys do not match", Arrays.equals(
-          keyStr.getBytes(), key));
-      assertTrue("bytes for vals do not match", Arrays.equals(
-          valStr.getBytes(), val));
+      assertTrue(Arrays.equals(
+          keyStr.getBytes(), key), "bytes for keys do not match");
+      assertTrue(Arrays.equals(
+          valStr.getBytes(), val), "bytes for vals do not match");
       assertTrue(scanner.advance());
     }
     return (start + n);
@@ -146,12 +147,12 @@ public class TestTFile {
     for (int i = start; i < (start + n); i++) {
       byte[] key = readKey(scanner);
       String keyStr = String.format(localFormatter, i);
-      assertTrue("bytes for keys do not match", Arrays.equals(
-          keyStr.getBytes(), key));
+      assertTrue(Arrays.equals(
+          keyStr.getBytes(), key), "bytes for keys do not match");
       scanner.advance();
       key = readKey(scanner);
-      assertTrue("bytes for keys do not match", Arrays.equals(
-          keyStr.getBytes(), key));
+      assertTrue(Arrays.equals(
+          keyStr.getBytes(), key), "bytes for keys do not match");
       scanner.advance();
     }
     return (start + n);
@@ -175,9 +176,9 @@ public class TestTFile {
     for (int i = 0; i < n; i++) {
       readKey = readKey(scanner);
       readValue = readValue(scanner);
-      assertTrue("failed to match keys", Arrays.equals(readKey, key));
-      assertTrue("failed to match values", Arrays.equals(readValue, value));
-      assertTrue("failed to advance cursor", scanner.advance());
+      assertTrue(Arrays.equals(readKey, key), "failed to match keys");
+      assertTrue(Arrays.equals(readValue, value), "failed to match values");
+      assertTrue(scanner.advance(), "failed to advance cursor");
     }
   }
 
@@ -206,10 +207,10 @@ public class TestTFile {
     for (int i = start; i < (start + n); i++) {
       String key = String.format(localFormatter, i);
       byte[] read = readKey(scanner);
-      assertTrue("keys not equal", Arrays.equals(key.getBytes(), read));
+      assertTrue(Arrays.equals(key.getBytes(), read), "keys not equal");
       String value = "value" + key;
       read = readValue(scanner);
-      assertTrue("values not equal", Arrays.equals(value.getBytes(), read));
+      assertTrue(Arrays.equals(value.getBytes(), read), "values not equal");
       scanner.advance();
     }
     return (start + n);
@@ -235,7 +236,7 @@ public class TestTFile {
     for (int i = start; i < start; i++) {
       String key = String.format(localFormatter, i);
       byte[] read = readKey(scanner);
-      assertTrue("keys not equal", Arrays.equals(key.getBytes(), read));
+      assertTrue(Arrays.equals(key.getBytes(), read), "keys not equal");
       try {
         read = readValue(scanner);
         assertTrue(false);
@@ -245,7 +246,7 @@ public class TestTFile {
       }
       String value = "value" + key;
       read = readLongValue(scanner, value.getBytes().length);
-      assertTrue("values nto equal", Arrays.equals(read, value.getBytes()));
+      assertTrue(Arrays.equals(read, value.getBytes()), "values nto equal");
       scanner.advance();
     }
     return (start + n);
@@ -294,11 +295,11 @@ public class TestTFile {
     Scanner scanner = reader.createScanner();
     readAllRecords(scanner);
     scanner.seekTo(getSomeKey(50));
-    assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50)));
+    assertTrue(scanner.seekTo(getSomeKey(50)), "location lookup failed");
     // read the key and see if it matches
     byte[] readKey = readKey(scanner);
-    assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50),
-        readKey));
+    assertTrue(Arrays.equals(getSomeKey(50),
+        readKey), "seeked key does not match");
 
     scanner.seekTo(new byte[0]);
     byte[] val1 = readValue(scanner);
@@ -308,19 +309,19 @@ public class TestTFile {
     
     // check for lowerBound
     scanner.lowerBound(getSomeKey(50));
-    assertTrue("locaton lookup failed", scanner.currentLocation
-        .compareTo(reader.end()) < 0);
+    assertTrue(scanner.currentLocation
+        .compareTo(reader.end()) < 0, "locaton lookup failed");
     readKey = readKey(scanner);
-    assertTrue("seeked key does not match", Arrays.equals(readKey,
-        getSomeKey(50)));
+    assertTrue(Arrays.equals(readKey,
+        getSomeKey(50)), "seeked key does not match");
 
     // check for upper bound
     scanner.upperBound(getSomeKey(50));
-    assertTrue("location lookup failed", scanner.currentLocation
-        .compareTo(reader.end()) < 0);
+    assertTrue(scanner.currentLocation
+        .compareTo(reader.end()) < 0, "location lookup failed");
     readKey = readKey(scanner);
-    assertTrue("seeked key does not match", Arrays.equals(readKey,
-        getSomeKey(51)));
+    assertTrue(Arrays.equals(readKey,
+        getSomeKey(51)), "seeked key does not match");
 
     scanner.close();
     // test for a range of scanner
@@ -398,8 +399,8 @@ public class TestTFile {
       DataInputStream din = reader.getMetaBlock("TfileMeta" + i);
       byte b[] = new byte[len];
       din.readFully(b);
-      assertTrue("faield to match metadata", Arrays.equals(
-          ("something to test" + i).getBytes(), b));
+      assertTrue(Arrays.equals(
+          ("something to test" + i).getBytes(), b), "faield to match metadata");
       din.close();
     }
   }
@@ -416,7 +417,7 @@ public class TestTFile {
     }
     din = reader.getMetaBlock("TFileMeta100");
     int read = din.read();
-    assertTrue("check for status", (read == -1));
+    assertTrue((read == -1), "check for status");
     din.close();
   }
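
Most of the churn in this file is the failure message moving from the first to the last parameter: the JUnit 5 Assertions overloads take the optional message last, and also accept a Supplier<String> so an expensive message is only built on failure. A sketch, including a further cleanup this commit does not make:

    // JUnit 4: message first
    assertTrue("keys not equal", Arrays.equals(key.getBytes(), read));

    // JUnit 5: condition first, message last
    assertTrue(Arrays.equals(key.getBytes(), read), "keys not equal");

    // ...or built lazily, only if the assertion fails
    assertTrue(Arrays.equals(key.getBytes(), read),
        () -> "keys not equal: got " + new String(read));

    // Possible further cleanup (not in this commit): assertArrayEquals
    // reports the first differing index instead of a bare boolean failure.
    assertArrayEquals(key.getBytes(), read, "keys not equal");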
 

+ 49 - 45
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java

@@ -17,14 +17,18 @@
 
 package org.apache.hadoop.io.file.tfile;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.EOFException;
 import java.io.IOException;
 import java.util.Random;
 
-import org.junit.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -36,9 +40,9 @@ import org.apache.hadoop.io.file.tfile.TFile.Writer;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Location;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * 
@@ -87,7 +91,7 @@ public class TestTFileByteArrays {
     this.comparator = comparator;
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     path = new Path(ROOT, outputFile);
     fs = path.getFileSystem(conf);
@@ -95,7 +99,7 @@ public class TestTFileByteArrays {
     writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     if (!skip)
       fs.delete(path, true);
@@ -108,9 +112,9 @@ public class TestTFileByteArrays {
     closeOutput();
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Assert.assertTrue(reader.isSorted());
+    assertTrue(reader.isSorted());
     Scanner scanner = reader.createScanner();
-    Assert.assertTrue(scanner.atEnd());
+    assertTrue(scanner.atEnd());
     scanner.close();
     reader.close();
   }
@@ -242,7 +246,7 @@ public class TestTFileByteArrays {
     locate(scanner, composeSortedKey(KEY, records1stBlock - 1).getBytes());
     locate(scanner, composeSortedKey(KEY, records1stBlock).getBytes());
     Location locX = locate(scanner, "keyX".getBytes());
-    Assert.assertEquals(scanner.endLocation, locX);
+    assertEquals(scanner.endLocation, locX);
     scanner.close();
     reader.close();
   }
@@ -254,7 +258,7 @@ public class TestTFileByteArrays {
     Reader reader = null;
     try {
       reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-      Assert.fail("Cannot read before closing the writer.");
+      fail("Cannot read before closing the writer.");
     } catch (IOException e) {
       // noop, expecting exceptions
     } finally {
@@ -279,7 +283,7 @@ public class TestTFileByteArrays {
     // add the same metablock
     try {
       writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
-      Assert.fail("Cannot create metablocks with the same name.");
+      fail("Cannot create metablocks with the same name.");
     } catch (Exception e) {
       // noop, expecting exceptions
     }
@@ -302,11 +306,11 @@ public class TestTFileByteArrays {
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     DataInputStream mb = reader.getMetaBlock("testX");
-    Assert.assertNotNull(mb);
+    assertNotNull(mb);
     mb.close();
     try {
       DataInputStream mbBad = reader.getMetaBlock("testY");
-      Assert.fail("Error on handling non-existent metablocks.");
+      fail("Error on handling non-existent metablocks.");
     } catch (Exception e) {
       // noop, expecting exceptions
     }
@@ -328,7 +332,7 @@ public class TestTFileByteArrays {
     // add more key/value
     try {
       writer.append("keyY".getBytes(), "valueY".getBytes());
-      Assert.fail("Cannot add key/value after start adding meta blocks.");
+      fail("Cannot add key/value after start adding meta blocks.");
     } catch (Exception e) {
       // noop, expecting exceptions
     }
@@ -347,10 +351,10 @@ public class TestTFileByteArrays {
     byte[] vbuf = new byte[BUF_SIZE];
     int vlen = scanner.entry().getValueLength();
     scanner.entry().getValue(vbuf);
-    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
+    assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
     try {
       scanner.entry().getValue(vbuf);
-      Assert.fail("Cannot get the value mlutiple times.");
+      fail("Cannot get the value mlutiple times.");
     } catch (Exception e) {
       // noop, expecting exceptions
     }
@@ -367,7 +371,7 @@ public class TestTFileByteArrays {
     out = fs.create(path);
     try {
       writer = new Writer(out, BLOCK_SIZE, "BAD", comparator, conf);
-      Assert.fail("Error on handling invalid compression codecs.");
+      fail("Error on handling invalid compression codecs.");
     } catch (Exception e) {
       // noop, expecting exceptions
       // e.printStackTrace();
@@ -385,7 +389,7 @@ public class TestTFileByteArrays {
     out.close();
     try {
       new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-      Assert.fail("Error on handling empty files.");
+      fail("Error on handling empty files.");
     } catch (EOFException e) {
       // noop, expecting exceptions
     }
@@ -409,7 +413,7 @@ public class TestTFileByteArrays {
     out.close();
     try {
       new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-      Assert.fail("Error on handling random files.");
+      fail("Error on handling random files.");
     } catch (IOException e) {
       // noop, expecting exceptions
     }
@@ -437,7 +441,7 @@ public class TestTFileByteArrays {
     try {
       writer.append("keyM".getBytes(), "valueM".getBytes());
       writer.append("keyA".getBytes(), "valueA".getBytes());
-      Assert.fail("Error on handling out of order keys.");
+      fail("Error on handling out of order keys.");
     } catch (Exception e) {
       // noop, expecting exceptions
       // e.printStackTrace();
@@ -452,7 +456,7 @@ public class TestTFileByteArrays {
       return;
     try {
       writer.append("keyX".getBytes(), -1, 4, "valueX".getBytes(), 0, 6);
-      Assert.fail("Error on handling negative offset.");
+      fail("Error on handling negative offset.");
     } catch (Exception e) {
       // noop, expecting exceptions
     }
@@ -469,7 +473,7 @@ public class TestTFileByteArrays {
     Scanner scanner = reader.createScanner();
     try {
       scanner.lowerBound("keyX".getBytes(), -1, 4);
-      Assert.fail("Error on handling negative offset.");
+      fail("Error on handling negative offset.");
     } catch (Exception e) {
       // noop, expecting exceptions
     } finally {
@@ -485,7 +489,7 @@ public class TestTFileByteArrays {
       return;
     try {
       writer.append("keyX".getBytes(), 0, -1, "valueX".getBytes(), 0, 6);
-      Assert.fail("Error on handling negative length.");
+      fail("Error on handling negative length.");
     } catch (Exception e) {
       // noop, expecting exceptions
     }
@@ -502,7 +506,7 @@ public class TestTFileByteArrays {
     Scanner scanner = reader.createScanner();
     try {
       scanner.lowerBound("keyX".getBytes(), 0, -1);
-      Assert.fail("Error on handling negative length.");
+      fail("Error on handling negative length.");
     } catch (Exception e) {
       // noop, expecting exceptions
     } finally {
@@ -525,7 +529,7 @@ public class TestTFileByteArrays {
       // test negative array offset
       try {
         scanner.seekTo("keyY".getBytes(), -1, 4);
-        Assert.fail("Failed to handle negative offset.");
+        fail("Failed to handle negative offset.");
       } catch (Exception e) {
         // noop, expecting exceptions
       }
@@ -533,7 +537,7 @@ public class TestTFileByteArrays {
       // test negative array length
       try {
         scanner.seekTo("keyY".getBytes(), 0, -2);
-        Assert.fail("Failed to handle negative key length.");
+        fail("Failed to handle negative key length.");
       } catch (Exception e) {
         // noop, expecting exceptions
       }
@@ -549,7 +553,7 @@ public class TestTFileByteArrays {
       return;
     long rawDataSize = writeRecords(10 * records1stBlock, false);
     if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) {
-      Assert.assertTrue(out.getPos() < rawDataSize);
+      assertTrue(out.getPos() < rawDataSize);
     }
     closeOutput();
   }
@@ -564,7 +568,7 @@ public class TestTFileByteArrays {
 
     try {
       writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf);
-      Assert.fail("Failed to catch file write not at position 0.");
+      fail("Failed to catch file write not at position 0.");
     } catch (Exception e) {
       // noop, expecting exceptions
     }
@@ -620,23 +624,23 @@ public class TestTFileByteArrays {
 
     try {
       for (int nx = 0; nx < count; nx++, scanner.advance()) {
-        Assert.assertFalse(scanner.atEnd());
-        // Assert.assertTrue(scanner.next());
+        assertFalse(scanner.atEnd());
+        // assertTrue(scanner.next());
 
         byte[] kbuf = new byte[BUF_SIZE];
         int klen = scanner.entry().getKeyLength();
         scanner.entry().getKey(kbuf);
-        Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY,
+        assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY,
             nx));
 
         byte[] vbuf = new byte[BUF_SIZE];
         int vlen = scanner.entry().getValueLength();
         scanner.entry().getValue(vbuf);
-        Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + nx);
+        assertEquals(new String(vbuf, 0, vlen), VALUE + nx);
       }
 
-      Assert.assertTrue(scanner.atEnd());
-      Assert.assertFalse(scanner.advance());
+      assertTrue(scanner.atEnd());
+      assertFalse(scanner.advance());
     } finally {
       scanner.close();
       reader.close();
@@ -647,7 +651,7 @@ public class TestTFileByteArrays {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
     scanner.seekTo(composeSortedKey(KEY, recordIndex).getBytes());
-    Assert.assertEquals(blockIndexExpected, scanner.currentLocation
+    assertEquals(blockIndexExpected, scanner.currentLocation
         .getBlockIndex());
     scanner.close();
     reader.close();
@@ -665,12 +669,12 @@ public class TestTFileByteArrays {
       byte[] vbuf = new byte[BUF_SIZE];
       int vlen = scanner.entry().getValueLength();
       scanner.entry().getValue(vbuf);
-      Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex);
+      assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex);
 
       byte[] kbuf = new byte[BUF_SIZE];
       int klen = scanner.entry().getKeyLength();
       scanner.entry().getKey(kbuf);
-      Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY,
+      assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY,
           recordIndex));
     } finally {
       scanner.close();
@@ -690,7 +694,7 @@ public class TestTFileByteArrays {
       byte[] kbuf1 = new byte[BUF_SIZE];
       int klen1 = scanner.entry().getKeyLength();
       scanner.entry().getKey(kbuf1);
-      Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
+      assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
           recordIndex));
 
       if (scanner.advance() && !scanner.atEnd()) {
@@ -698,7 +702,7 @@ public class TestTFileByteArrays {
         byte[] kbuf2 = new byte[BUF_SIZE];
         int klen2 = scanner.entry().getKeyLength();
         scanner.entry().getKey(kbuf2);
-        Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
+        assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
             recordIndex + 1));
       }
     } finally {
@@ -718,13 +722,13 @@ public class TestTFileByteArrays {
     byte[] vbuf1 = new byte[BUF_SIZE];
     int vlen1 = scanner.entry().getValueLength();
     scanner.entry().getValue(vbuf1);
-    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
+    assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
 
     if (scanner.advance() && !scanner.atEnd()) {
       byte[] vbuf2 = new byte[BUF_SIZE];
       int vlen2 = scanner.entry().getValueLength();
       scanner.entry().getValue(vbuf2);
-      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
+      assertEquals(new String(vbuf2, 0, vlen2), VALUE
           + (recordIndex + 1));
     }
 
@@ -743,17 +747,17 @@ public class TestTFileByteArrays {
     byte[] kbuf1 = new byte[BUF_SIZE];
     int klen1 = scanner.entry().getKeyLength();
     scanner.entry().getKey(kbuf1);
-    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
+    assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
         recordIndex));
 
     klen1 = scanner.entry().getKeyLength();
     scanner.entry().getKey(kbuf1);
-    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
+    assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
         recordIndex));
 
     klen1 = scanner.entry().getKeyLength();
     scanner.entry().getKey(kbuf1);
-    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
+    assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
         recordIndex));
 
     scanner.close();

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java

@@ -28,9 +28,9 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.file.tfile.TFile.Writer;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestTFileComparator2 {
   private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath();

+ 10 - 10
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java

@@ -17,13 +17,13 @@
 
 package org.apache.hadoop.io.file.tfile;
 
-import java.io.IOException;
+import static org.junit.jupiter.api.Assertions.fail;
 
-import org.junit.Assert;
+import java.io.IOException;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -58,7 +58,7 @@ public class TestTFileComparators {
   private int records1stBlock = 4480;
   private int records2ndBlock = 4263;
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     conf = new Configuration();
     path = new Path(ROOT, outputFile);
@@ -66,7 +66,7 @@ public class TestTFileComparators {
     out = fs.create(path);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     fs.delete(path, true);
   }
@@ -76,7 +76,7 @@ public class TestTFileComparators {
   public void testFailureBadComparatorNames() throws IOException {
     try {
       writer = new Writer(out, BLOCK_SIZE, compression, "badcmp", conf);
-      Assert.fail("Failed to catch unsupported comparator names");
+      fail("Failed to catch unsupported comparator names");
     }
     catch (Exception e) {
       // noop, expecting exceptions
@@ -91,7 +91,7 @@ public class TestTFileComparators {
       writer =
           new Writer(out, BLOCK_SIZE, compression,
               "jclass: some.non.existence.clazz", conf);
-      Assert.fail("Failed to catch unsupported comparator names");
+      fail("Failed to catch unsupported comparator names");
     }
     catch (Exception e) {
       // noop, expecting exceptions
@@ -106,7 +106,7 @@ public class TestTFileComparators {
       writer =
           new Writer(out, BLOCK_SIZE, compression,
               "jclass:org.apache.hadoop.io.file.tfile.Chunk", conf);
-      Assert.fail("Failed to catch unsupported comparator names");
+      fail("Failed to catch unsupported comparator names");
     }
     catch (Exception e) {
       // noop, expecting exceptions

+ 2 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java

@@ -22,6 +22,7 @@ import java.io.Serializable;
 
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableComparator;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * 
@@ -34,6 +35,7 @@ public class TestTFileJClassComparatorByteArrays extends TestTFileByteArrays {
   /**
    * Test non-compression codec, using the same test cases as in the ByteArrays.
    */
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     init(Compression.Algorithm.GZ.getName(),
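
The @BeforeEach added here, and again in the LZO and NONE codec subclasses below, is not cosmetic. Jupiter ignores a superclass lifecycle method that a subclass overrides, and an unannotated override is not itself a lifecycle method, so without re-declaring the annotation no setup would run at all. JUnit 4 invoked the inherited @Before method reflectively, and virtual dispatch then executed the override. The required shape:

    @BeforeEach   // must be re-declared on every override
    @Override
    public void setUp() throws IOException {
      // subclass-specific init(...) call, as in the diff above
    }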

+ 2 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsByteArrays.java

@@ -21,11 +21,13 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 
 import org.apache.hadoop.io.file.tfile.Compression.Algorithm;
+import org.junit.jupiter.api.BeforeEach;
 
 public class TestTFileLzoCodecsByteArrays extends TestTFileByteArrays {
   /**
    * Test LZO compression codec, using the same test cases as in the ByteArrays.
    */
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     skip = !(Algorithm.LZO.isSupported());

+ 2 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java

@@ -21,11 +21,13 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 
 import org.apache.hadoop.io.file.tfile.Compression.Algorithm;
+import org.junit.jupiter.api.BeforeEach;
 
 public class TestTFileLzoCodecsStreams extends TestTFileStreams {
   /**
    * Test LZO compression codec, using the same test cases as in the ByteArrays.
    */
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     skip = !(Algorithm.LZO.isSupported());

+ 3 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsByteArrays.java

@@ -17,12 +17,15 @@
 
 package org.apache.hadoop.io.file.tfile;
 
+import org.junit.jupiter.api.BeforeEach;
+
 import java.io.IOException;
 
 public class TestTFileNoneCodecsByteArrays extends TestTFileByteArrays {
   /**
    * Test non-compression codec, using the same test cases as in the ByteArrays.
    */
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     init(Compression.Algorithm.NONE.getName(), "memcmp", 24, 24);

+ 3 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java

@@ -17,6 +17,8 @@
 
 package org.apache.hadoop.io.file.tfile;
 
+import org.junit.jupiter.api.BeforeEach;
+
 import java.io.IOException;
 
 /**
@@ -30,6 +32,7 @@ public class TestTFileNoneCodecsJClassComparatorByteArrays extends TestTFileByte
   /**
    * Test non-compression codec, using the same test cases as in the ByteArrays.
    */
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     init(Compression.Algorithm.NONE.getName(),

+ 3 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsStreams.java

@@ -18,12 +18,15 @@
 
 package org.apache.hadoop.io.file.tfile;
 
+import org.junit.jupiter.api.BeforeEach;
+
 import java.io.IOException;
 
 public class TestTFileNoneCodecsStreams extends TestTFileStreams {
   /**
    * Test non-compression codec, using the same test cases as in the ByteArrays.
    */
+  @BeforeEach
   @Override
   public void setUp() throws IOException {
     init(Compression.Algorithm.NONE.getName(), "memcmp");

+ 5 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java

@@ -22,9 +22,9 @@ import java.util.Random;
 import java.util.StringTokenizer;
 
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 
 import org.apache.commons.cli.CommandLine;
@@ -60,7 +60,7 @@ public class TestTFileSeek {
   private DiscreteRNG keyLenGen;
   private KVGenerator kvGen;
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     if (options == null) {
       options = new MyOptions(new String[0]);
@@ -87,7 +87,7 @@ public class TestTFileSeek {
             options.dictSize);
   }
   
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     fs.delete(path, true);
   }

+ 5 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java

@@ -24,9 +24,9 @@ import java.util.Random;
 import java.util.StringTokenizer;
 
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -57,7 +57,7 @@ public class TestTFileSeqFileComparison {
   private DateFormat formatter;
   byte[][] dictionary;
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     if (options == null) {
       options = new MyOptions(new String[0]);
@@ -84,7 +84,7 @@ public class TestTFileSeqFileComparison {
     }
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     // do nothing
   }

+ 17 - 17
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java

@@ -19,10 +19,10 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 import java.util.Random;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -116,10 +116,10 @@ public class TestTFileSplit {
       BytesWritable value = new BytesWritable();
       long x=startRec;
       while (!scanner.atEnd()) {
-        assertEquals("Incorrect RecNum returned by scanner", scanner.getRecordNum(), x);
+        assertEquals(scanner.getRecordNum(), x, "Incorrect RecNum returned by scanner");
         scanner.entry().get(key, value);
         ++count;
-        assertEquals("Incorrect RecNum returned by scanner", scanner.getRecordNum(), x);
+        assertEquals(scanner.getRecordNum(), x, "Incorrect RecNum returned by scanner");
         scanner.advance();
         ++x;
       }
@@ -147,34 +147,34 @@ public class TestTFileSplit {
       end += (totalRecs / 2);
     end += (totalRecs / 2) + 1;
 
-    assertEquals("RecNum for offset=0 should be 0", 0, reader
-        .getRecordNumNear(0));
+    assertEquals(0, reader.getRecordNumNear(0),
+        "RecNum for offset=0 should be 0");
     for (long x : new long[] { fileLen, fileLen + 1, 2 * fileLen }) {
-      assertEquals("RecNum for offset>=fileLen should be total entries",
-          totalRecs, reader.getRecordNumNear(x));
+      assertEquals(totalRecs, reader.getRecordNumNear(x),
+          "RecNum for offset>=fileLen should be total entries");
     }
 
     for (long i = 0; i < 100; ++i) {
-      assertEquals("Locaton to RecNum conversion not symmetric", i, reader
-          .getRecordNumByLocation(reader.getLocationByRecordNum(i)));
+      assertEquals(i, reader.getRecordNumByLocation(reader.getLocationByRecordNum(i)),
+          "Locaton to RecNum conversion not symmetric");
     }
 
     for (long i = 1; i < 100; ++i) {
       long x = totalRecs - i;
-      assertEquals("Locaton to RecNum conversion not symmetric", x, reader
-          .getRecordNumByLocation(reader.getLocationByRecordNum(x)));
+      assertEquals(x, reader.getRecordNumByLocation(reader.getLocationByRecordNum(x)),
+          "Locaton to RecNum conversion not symmetric");
     }
 
     for (long i = begin; i < end; ++i) {
-      assertEquals("Locaton to RecNum conversion not symmetric", i, reader
-          .getRecordNumByLocation(reader.getLocationByRecordNum(i)));
+      assertEquals(i, reader.getRecordNumByLocation(reader.getLocationByRecordNum(i)),
+          "Locaton to RecNum conversion not symmetric");
     }
 
     for (int i = 0; i < 1000; ++i) {
       long x = random.nextLong() % totalRecs;
       if (x < 0) x += totalRecs;
-      assertEquals("Locaton to RecNum conversion not symmetric", x, reader
-          .getRecordNumByLocation(reader.getLocationByRecordNum(x)));
+      assertEquals(x, reader.getRecordNumByLocation(reader.getLocationByRecordNum(x)),
+          "Locaton to RecNum conversion not symmetric");
     }
   }
 

+ 8 - 8
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java

@@ -22,12 +22,12 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.util.Random;
 
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -68,7 +68,7 @@ public class TestTFileStreams {
     this.comparator = comparator;
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     conf = new Configuration();
     path = new Path(ROOT, outputFile);
@@ -77,7 +77,7 @@ public class TestTFileStreams {
     writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf);
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     if (!skip) {
       try {
@@ -307,7 +307,7 @@ public class TestTFileStreams {
     }
     outKey.close();
     outKey.close();
-    assertTrue("Multiple close should have no effect.", true);
+    assertTrue(true, "Multiple close should have no effect.");
   }
 
   @Test

+ 6 - 6
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 
-import org.junit.After;
+import org.junit.jupiter.api.AfterEach;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -29,11 +29,11 @@ import org.apache.hadoop.io.file.tfile.TFile.Reader;
 import org.apache.hadoop.io.file.tfile.TFile.Writer;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class TestTFileUnsortedByteArrays {
   private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath();
@@ -64,7 +64,7 @@ public class TestTFileUnsortedByteArrays {
     this.records2ndBlock = numRecords2ndBlock;
   }
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     conf = new Configuration();
     path = new Path(ROOT, outputFile);
@@ -78,7 +78,7 @@ public class TestTFileUnsortedByteArrays {
     closeOutput();
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     fs.delete(path, true);
   }
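
Worth noting in this file: the org.assertj.core.api.Assertions.assertThat import stays as-is. AssertJ assertions are JUnit-version-agnostic, so only the JUnit fail import needed migrating. A hypothetical usage for illustration:

    import static org.assertj.core.api.Assertions.assertThat;

    // AssertJ reads identically under JUnit 4 and JUnit 5.
    assertThat(reader.isSorted()).isFalse();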

+ 17 - 18
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java

@@ -21,8 +21,7 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 import java.util.Random;
 
-import org.junit.After;
-import org.junit.Assert;
+import org.junit.jupiter.api.AfterEach;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -30,10 +29,11 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestVLong {
   private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath();
@@ -42,7 +42,7 @@ public class TestVLong {
   private Path path;
   private String outputFile = "TestVLong";
 
-  @Before
+  @BeforeEach
   public void setUp() throws IOException {
     conf = new Configuration();
     path = new Path(ROOT, outputFile);
@@ -52,7 +52,7 @@ public class TestVLong {
     }
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     if (fs.exists(path)) {
       fs.delete(path, false);
@@ -66,9 +66,9 @@ public class TestVLong {
       Utils.writeVLong(out, i);
     }
     out.close();
-    Assert.assertEquals("Incorrect encoded size", (1 << Byte.SIZE) + 96, fs
+    assertEquals((1 << Byte.SIZE) + 96, fs
         .getFileStatus(
-        path).getLen());
+        path).getLen(), "Incorrect encoded size");
 
     FSDataInputStream in = fs.open(path);
     for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) {
@@ -97,36 +97,35 @@ public class TestVLong {
   @Test
   public void testVLongShort() throws IOException {
     long size = writeAndVerify(0);
-    Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 2
+    assertEquals((1 << Short.SIZE) * 2
         + ((1 << Byte.SIZE) - 40)
-        * (1 << Byte.SIZE) - 128 - 32, size);
+        * (1 << Byte.SIZE) - 128 - 32, size, "Incorrect encoded size");
   }
 
   @Test
   public void testVLong3Bytes() throws IOException {
     long size = writeAndVerify(Byte.SIZE);
-    Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 3
-        + ((1 << Byte.SIZE) - 32) * (1 << Byte.SIZE) - 40 - 1, size);
+    assertEquals((1 << Short.SIZE) * 3
+        + ((1 << Byte.SIZE) - 32) * (1 << Byte.SIZE) - 40 - 1, size, "Incorrect encoded size");
   }
 
   @Test
   public void testVLong4Bytes() throws IOException {
     long size = writeAndVerify(Byte.SIZE * 2);
-    Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 4
-        + ((1 << Byte.SIZE) - 16) * (1 << Byte.SIZE) - 32 - 2, size);
+    assertEquals((1 << Short.SIZE) * 4
+        + ((1 << Byte.SIZE) - 16) * (1 << Byte.SIZE) - 32 - 2, size, "Incorrect encoded size");
   }
 
   @Test
   public void testVLong5Bytes() throws IOException {
     long size = writeAndVerify(Byte.SIZE * 3);
-     Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 6 - 256
-        - 16 - 3, size);
+    assertEquals((1 << Short.SIZE) * 6 - 256 - 16 - 3, size, "Incorrect encoded size");
   }
 
   private void verifySixOrMoreBytes(int bytes) throws IOException {
     long size = writeAndVerify(Byte.SIZE * (bytes - 2));
-    Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE)
-        * (bytes + 1) - 256 - bytes + 1, size);
+    assertEquals((1 << Short.SIZE)
+        * (bytes + 1) - 256 - bytes + 1, size, "Incorrect encoded size");
   }
 
   @Test
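
The TestNativeIO diff that follows converts JUnit 4's timeout attribute to Jupiter's @Timeout. Two details: @Timeout defaults to seconds, hence timeout = 30000 becomes value = 30 (a unit attribute taking a java.util.concurrent.TimeUnit is available), and by default Jupiter enforces the timeout on the test's own thread instead of spawning a separate one as JUnit 4 did. The pattern:

    // JUnit 4: milliseconds, body runs in a separate thread
    @Test(timeout = 30000)
    public void testFstat() throws Exception { /* ... */ }

    // JUnit 5: seconds by default; same-thread enforcement unless
    // @Timeout(..., threadMode = Timeout.ThreadMode.SEPARATE_THREAD)
    @Test
    @Timeout(value = 30)
    public void testFstat() throws Exception { /* ... */ }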

+ 127 - 97
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java

@@ -61,11 +61,16 @@ import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat.*;
 import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assume.*;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,18 +80,19 @@ public class TestNativeIO {
 
   static final File TEST_DIR = GenericTestUtils.getTestDir("testnativeio");
 
-  @Before
+  @BeforeEach
   public void checkLoaded() {
     assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
   }
 
-  @Before
+  @BeforeEach
   public void setupTestDir() {
     FileUtil.fullyDelete(TEST_DIR);
     TEST_DIR.mkdirs();
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testFstat() throws Exception {
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
@@ -107,8 +113,8 @@ public class TestNativeIO {
     assertEquals(expectedOwner, owner);
     assertNotNull(stat.getGroup());
     assertTrue(!stat.getGroup().isEmpty());
-    assertEquals("Stat mode field should indicate a regular file", S_IFREG,
-      stat.getMode() & S_IFMT);
+    assertEquals(S_IFREG,
+        stat.getMode() & S_IFMT, "Stat mode field should indicate a regular file");
   }
 
   /**
@@ -117,7 +123,8 @@ public class TestNativeIO {
    * NOTE: this test is likely to fail on RHEL 6.0 which has a non-threadsafe
    * implementation of getpwuid_r.
    */
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testMultiThreadedFstat() throws Exception {
     assumeNotWindows();
 
@@ -138,8 +145,8 @@ public class TestNativeIO {
               assertEquals(System.getProperty("user.name"), stat.getOwner());
               assertNotNull(stat.getGroup());
               assertTrue(!stat.getGroup().isEmpty());
-              assertEquals("Stat mode field should indicate a regular file",
-                S_IFREG, stat.getMode() & S_IFMT);
+              assertEquals(S_IFREG, stat.getMode() & S_IFMT,
+                  "Stat mode field should indicate a regular file");
             } catch (Throwable t) {
               thrown.set(t);
             }
@@ -160,7 +167,8 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testFstatClosedFd() throws Exception {
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat2"));
@@ -173,7 +181,8 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testStat() throws Exception {
     Configuration conf = new Configuration();
     FileSystem fileSystem = FileSystem.getLocal(conf).getRawFileSystem();
@@ -232,7 +241,8 @@ public class TestNativeIO {
             () -> NativeIO.POSIX.getStat(testInvalidFilePath));
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testMultiThreadedStat() throws Exception {
     Configuration conf = new Configuration();
     FileSystem fileSystem = FileSystem.getLocal(conf).getRawFileSystem();
@@ -277,15 +287,16 @@ public class TestNativeIO {
     executorService.shutdown();
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testSetFilePointer() throws Exception {
     assumeWindows();
 
     LOG.info("Set a file pointer on Windows");
     try {
       File testfile = new File(TEST_DIR, "testSetFilePointer");
-      assertTrue("Create test subject",
-          testfile.exists() || testfile.createNewFile());
+      assertTrue(testfile.exists() || testfile.createNewFile(),
+          "Create test subject");
       FileWriter writer = new FileWriter(testfile);
       try {
         for (int i = 0; i < 200; i++)
@@ -311,7 +322,7 @@ public class TestNativeIO {
       FileReader reader = new FileReader(fd);
       try {
         int c = reader.read();
-        assertTrue("Unexpected character: " + c, c == 'b');
+        assertTrue(c == 'b', "Unexpected character: " + c);
       } catch (Exception readerException) {
         fail("Got unexpected exception: " + readerException.getMessage());
       } finally {
@@ -322,15 +333,16 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testCreateFile() throws Exception {
     assumeWindows();
 
     LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
     try {
       File testfile = new File(TEST_DIR, "testCreateFile");
-      assertTrue("Create test subject",
-          testfile.exists() || testfile.createNewFile());
+      assertTrue(testfile.exists() || testfile.createNewFile(),
+          "Create test subject");
 
       FileDescriptor fd = NativeIO.Windows.createFile(
           testfile.getCanonicalPath(),
@@ -347,7 +359,7 @@ public class TestNativeIO {
         File newfile = new File(TEST_DIR, "testRenamedFile");
 
         boolean renamed = testfile.renameTo(newfile);
-        assertTrue("Rename failed.", renamed);
+        assertTrue(renamed, "Rename failed.");
 
         fin.read();
       } catch (Exception e) {
@@ -363,7 +375,8 @@ public class TestNativeIO {
   }
 
   /** Validate access checks on Windows */
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testAccess() throws Exception {
     assumeWindows();
 
@@ -437,7 +450,8 @@ public class TestNativeIO {
         NativeIO.Windows.AccessRight.ACCESS_EXECUTE));
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testOpenMissingWithoutCreate() throws Exception {
     assumeNotWindows();
 
@@ -452,7 +466,8 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testOpenWithCreate() throws Exception {
     assumeNotWindows();
 
@@ -484,7 +499,8 @@ public class TestNativeIO {
    * Test that opens and closes a file 10000 times - this would crash with
    * "Too many open files" if we leaked fds using this access pattern.
    */
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testFDDoesntLeak() throws IOException {
     assumeNotWindows();
 
@@ -503,7 +519,8 @@ public class TestNativeIO {
   /**
    * Test basic chmod operation
    */
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testChmod() throws Exception {
     assumeNotWindows();
 
@@ -515,8 +532,7 @@ public class TestNativeIO {
     }
 
     File toChmod = new File(TEST_DIR, "testChmod");
-    assertTrue("Create test subject",
-               toChmod.exists() || toChmod.mkdir());
+    assertTrue(toChmod.exists() || toChmod.mkdir(), "Create test subject");
     NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0777);
     assertPermissions(toChmod, 0777);
     NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0000);
@@ -526,7 +542,8 @@ public class TestNativeIO {
   }
 
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testPosixFadvise() throws Exception {
     assumeNotWindows();
 
@@ -560,7 +577,8 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testSyncFileRange() throws Exception {
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testSyncFileRange"));
@@ -593,19 +611,22 @@ public class TestNativeIO {
     assertEquals(expected, perms.toShort());
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testGetUserName() throws IOException {
     assumeNotWindows();
     assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testGetGroupName() throws IOException {
     assumeNotWindows();
     assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testRenameTo() throws Exception {
     final File TEST_DIR = GenericTestUtils.getTestDir("renameTest") ;
     assumeTrue(TEST_DIR.mkdirs());
@@ -614,20 +635,20 @@ public class TestNativeIO {
     // Test attempting to rename a nonexistent file.
     try {
       NativeIO.renameTo(nonExistentFile, targetFile);
-      Assert.fail();
+      fail();
     } catch (NativeIOException e) {
       if (Path.WINDOWS) {
-        Assert.assertEquals(
+        assertEquals(
           String.format("The system cannot find the file specified.%n"),
           e.getMessage());
       } else {
-        Assert.assertEquals(Errno.ENOENT, e.getErrno());
+        assertEquals(Errno.ENOENT, e.getErrno());
       }
     }
 
     // Test renaming a file to itself.  It should succeed and do nothing.
     File sourceFile = new File(TEST_DIR, "source");
-    Assert.assertTrue(sourceFile.createNewFile());
+    assertTrue(sourceFile.createNewFile());
     NativeIO.renameTo(sourceFile, sourceFile);
 
     // Test renaming a source to a destination.
@@ -635,18 +656,18 @@ public class TestNativeIO {
 
     // Test renaming a source to a path which uses a file as a directory.
     sourceFile = new File(TEST_DIR, "source");
-    Assert.assertTrue(sourceFile.createNewFile());
+    assertTrue(sourceFile.createNewFile());
     File badTarget = new File(targetFile, "subdir");
     try {
       NativeIO.renameTo(sourceFile, badTarget);
-      Assert.fail();
+      fail();
     } catch (NativeIOException e) {
       if (Path.WINDOWS) {
-        Assert.assertEquals(
+        assertEquals(
           String.format("The parameter is incorrect.%n"),
           e.getMessage());
       } else {
-        Assert.assertEquals(Errno.ENOTDIR, e.getErrno());
+        assertEquals(Errno.ENOTDIR, e.getErrno());
       }
     }
 
@@ -655,7 +676,8 @@ public class TestNativeIO {
     NativeIO.renameTo(sourceFile, targetFile);
   }
 
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testMlock() throws Exception {
     assumeTrue(NativeIO.isAvailable());
     final File TEST_FILE = GenericTestUtils.getTestDir("testMlockFile");
@@ -689,7 +711,7 @@ public class TestNativeIO {
       for (int i=0; i<fileSize; i++) {
         sum += mapbuf.get(i);
       }
-      assertEquals("Expected sums to be equal", bufSum, sum);
+      assertEquals(bufSum, sum, "Expected sums to be equal");
       // munmap the buffer, which also implicitly unlocks it
       NativeIO.POSIX.munmap(mapbuf);
     } finally {
@@ -702,13 +724,15 @@ public class TestNativeIO {
     }
   }
 
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testGetMemlockLimit() throws Exception {
     assumeTrue(NativeIO.isAvailable());
     NativeIO.getMemlockLimit();
   }
 
-  @Test (timeout = 30000)
+  @Test
+  @Timeout(value = 30)
   public void testCopyFileUnbuffered() throws Exception {
     final String METHOD_NAME = GenericTestUtils.getMethodName();
     File srcFile = new File(TEST_DIR, METHOD_NAME + ".src.dat");
@@ -731,7 +755,7 @@ public class TestNativeIO {
         mapBuf.put(bytesToWrite);
       }
       NativeIO.copyFileUnbuffered(srcFile, dstFile);
-      Assert.assertEquals(srcFile.length(), dstFile.length());
+      assertEquals(srcFile.length(), dstFile.length());
     } finally {
       IOUtils.cleanupWithLogger(LOG, channel);
       IOUtils.cleanupWithLogger(LOG, raSrcFile);
@@ -739,54 +763,57 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testNativePosixConsts() {
     assumeNotWindows("Native POSIX constants not required for Windows");
-    assertTrue("Native 0_RDONLY const not set", O_RDONLY >= 0);
-    assertTrue("Native 0_WRONLY const not set", O_WRONLY >= 0);
-    assertTrue("Native 0_RDWR const not set", O_RDWR >= 0);
-    assertTrue("Native 0_CREAT const not set", O_CREAT >= 0);
-    assertTrue("Native 0_EXCL const not set", O_EXCL >= 0);
-    assertTrue("Native 0_NOCTTY const not set", O_NOCTTY >= 0);
-    assertTrue("Native 0_TRUNC const not set", O_TRUNC >= 0);
-    assertTrue("Native 0_APPEND const not set", O_APPEND >= 0);
-    assertTrue("Native 0_NONBLOCK const not set", O_NONBLOCK >= 0);
-    assertTrue("Native 0_SYNC const not set", O_SYNC >= 0);
-    assertTrue("Native S_IFMT const not set", S_IFMT >= 0);
-    assertTrue("Native S_IFIFO const not set", S_IFIFO >= 0);
-    assertTrue("Native S_IFCHR const not set", S_IFCHR >= 0);
-    assertTrue("Native S_IFDIR const not set", S_IFDIR >= 0);
-    assertTrue("Native S_IFBLK const not set", S_IFBLK >= 0);
-    assertTrue("Native S_IFREG const not set", S_IFREG >= 0);
-    assertTrue("Native S_IFLNK const not set", S_IFLNK >= 0);
-    assertTrue("Native S_IFSOCK const not set", S_IFSOCK >= 0);
-    assertTrue("Native S_ISUID const not set", S_ISUID >= 0);
-    assertTrue("Native S_ISGID const not set", S_ISGID >= 0);
-    assertTrue("Native S_ISVTX const not set", S_ISVTX >= 0);
-    assertTrue("Native S_IRUSR const not set", S_IRUSR >= 0);
-    assertTrue("Native S_IWUSR const not set", S_IWUSR >= 0);
-    assertTrue("Native S_IXUSR const not set", S_IXUSR >= 0);
-  }
-
-  @Test (timeout=10000)
+    assertTrue(O_RDONLY >= 0, "Native O_RDONLY const not set");
+    assertTrue(O_WRONLY >= 0, "Native O_WRONLY const not set");
+    assertTrue(O_RDWR >= 0, "Native O_RDWR const not set");
+    assertTrue(O_CREAT >= 0, "Native O_CREAT const not set");
+    assertTrue(O_EXCL >= 0, "Native O_EXCL const not set");
+    assertTrue(O_NOCTTY >= 0, "Native O_NOCTTY const not set");
+    assertTrue(O_TRUNC >= 0, "Native O_TRUNC const not set");
+    assertTrue(O_APPEND >= 0, "Native O_APPEND const not set");
+    assertTrue(O_NONBLOCK >= 0, "Native O_NONBLOCK const not set");
+    assertTrue(O_SYNC >= 0, "Native O_SYNC const not set");
+    assertTrue(S_IFMT >= 0, "Native S_IFMT const not set");
+    assertTrue(S_IFIFO >= 0, "Native S_IFIFO const not set");
+    assertTrue(S_IFCHR >= 0, "Native S_IFCHR const not set");
+    assertTrue(S_IFDIR >= 0, "Native S_IFDIR const not set");
+    assertTrue(S_IFBLK >= 0, "Native S_IFBLK const not set");
+    assertTrue(S_IFREG >= 0, "Native S_IFREG const not set");
+    assertTrue(S_IFLNK >= 0, "Native S_IFLNK const not set");
+    assertTrue(S_IFSOCK >= 0, "Native S_IFSOCK const not set");
+    assertTrue(S_ISUID >= 0, "Native S_ISUID const not set");
+    assertTrue(S_ISGID >= 0, "Native S_ISGID const not set");
+    assertTrue(S_ISVTX >= 0, "Native S_ISVTX const not set");
+    assertTrue(S_IRUSR >= 0, "Native S_IRUSR const not set");
+    assertTrue(S_IWUSR >= 0, "Native S_IWUSR const not set");
+    assertTrue(S_IXUSR >= 0, "Native S_IXUSR const not set");
+  }
+
+  @Test
+  @Timeout(value = 10)
   public void testNativeFadviseConsts() {
-    assumeTrue("Fadvise constants not supported", fadvisePossible);
-    assertTrue("Native POSIX_FADV_NORMAL const not set",
-      POSIX_FADV_NORMAL >= 0);
-    assertTrue("Native POSIX_FADV_RANDOM const not set",
-      POSIX_FADV_RANDOM >= 0);
-    assertTrue("Native POSIX_FADV_SEQUENTIAL const not set",
-      POSIX_FADV_SEQUENTIAL >= 0);
-    assertTrue("Native POSIX_FADV_WILLNEED const not set",
-      POSIX_FADV_WILLNEED >= 0);
-    assertTrue("Native POSIX_FADV_DONTNEED const not set",
-      POSIX_FADV_DONTNEED >= 0);
-    assertTrue("Native POSIX_FADV_NOREUSE const not set",
-      POSIX_FADV_NOREUSE >= 0);
-  }
-
-
-  @Test (timeout=10000)
+    assumeTrue(fadvisePossible, "Fadvise constants not supported");
+    assertTrue(POSIX_FADV_NORMAL >= 0,
+        "Native POSIX_FADV_NORMAL const not set");
+    assertTrue(POSIX_FADV_RANDOM >= 0,
+        "Native POSIX_FADV_RANDOM const not set");
+    assertTrue(POSIX_FADV_SEQUENTIAL >= 0,
+        "Native POSIX_FADV_SEQUENTIAL const not set");
+    assertTrue(POSIX_FADV_WILLNEED >= 0,
+        "Native POSIX_FADV_WILLNEED const not set");
+    assertTrue(POSIX_FADV_DONTNEED >= 0,
+        "Native POSIX_FADV_DONTNEED const not set");
+    assertTrue(POSIX_FADV_NOREUSE >= 0,
+        "Native POSIX_FADV_NOREUSE const not set");
+  }
+
+
+  @Test
+  @Timeout(value = 10)
   public void testPmemCheckParameters() {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK
@@ -817,7 +844,8 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testPmemMapMultipleFiles() {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK
@@ -847,7 +875,8 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testPmemMapBigFile() {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK
@@ -871,7 +900,8 @@ public class TestNativeIO {
     }
   }
 
-  @Test (timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testPmemCopy() throws IOException {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK

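Note on the timeout migration running through TestNativeIO above: JUnit 4's @Test(timeout=10000) takes milliseconds, while Jupiter's @Timeout(value = 10) defaults to seconds, so each converted limit is unchanged. A minimal sketch of both spellings, with hypothetical class and method names that are not part of the patch:

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

// Illustrative only; not part of the patch.
public class TimeoutMigrationExample {

  // JUnit 4 equivalent: @Test(timeout = 10000), a value in milliseconds.
  @Test
  @Timeout(value = 10)  // @Timeout defaults to TimeUnit.SECONDS
  public void finishesWithinTenSeconds() throws Exception {
    Thread.sleep(100);  // comfortably under the limit
  }

  // The unit can also be stated explicitly when milliseconds are clearer.
  @Test
  @Timeout(value = 10_000, unit = TimeUnit.MILLISECONDS)
  public void sameLimitInMilliseconds() throws Exception {
    Thread.sleep(100);
  }
}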
+ 8 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIoInit.java

@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.io.nativeio;
 
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 /**
  * Separate class to ensure forked Tests load the static blocks again.
@@ -40,7 +41,8 @@ public class TestNativeIoInit {
    * Expected: Loading these two static blocks separately should not result in
    * deadlock.
    */
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testDeadlockLinux() throws Exception {
     Thread one = new Thread() {
       @Override
@@ -60,9 +62,10 @@ public class TestNativeIoInit {
     two.join();
   }
 
-  @Test(timeout = 10000)
+  @Test
+  @Timeout(value = 10)
   public void testDeadlockWindows() throws Exception {
-    assumeTrue("Expected windows", Path.WINDOWS);
+    assumeTrue(Path.WINDOWS, "Expected Windows");
     Thread one = new Thread() {
       @Override
       public void run() {

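The TestNativeIoInit hunk above is truncated, so only part of the deadlock check is visible. A minimal sketch of the idea it describes, with illustrative holder classes that stand in for the real static native-loading blocks (none of these names come from the patch): each thread forces a different class's static initializer, and the declarative @Timeout fails the test instead of hanging the build if initialization deadlocks.

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

// Illustrative only; not part of the patch.
public class StaticInitDeadlockExample {

  static class HolderA {
    // Non-constant initializer, so touching the field triggers class init.
    static final long LOADED_AT = System.nanoTime();
  }

  static class HolderB {
    static final long LOADED_AT = System.nanoTime();
  }

  @Test
  @Timeout(value = 10)  // seconds, matching the migrated tests
  public void staticBlocksLoadWithoutDeadlock() throws Exception {
    Thread one = new Thread(() -> System.out.println(HolderA.LOADED_AT));
    Thread two = new Thread(() -> System.out.println(HolderB.LOADED_AT));
    one.start();
    two.start();
    one.join();
    two.join();
  }
}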
+ 23 - 16
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java

@@ -17,15 +17,19 @@
  */
 package org.apache.hadoop.io.nativeio;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
-import org.junit.Assert;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -39,13 +43,14 @@ public class TestSharedFileDescriptorFactory {
 
   private static final File TEST_BASE = GenericTestUtils.getTestDir();
 
-  @Before
+  @BeforeEach
   public void setup() throws Exception {
-    Assume.assumeTrue(null ==
+    assumeTrue(null ==
         SharedFileDescriptorFactory.getLoadingFailureReason());
   }
 
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testReadAndWrite() throws Exception {
     File path = new File(TEST_BASE, "testReadAndWrite");
     path.mkdirs();
@@ -57,7 +62,7 @@ public class TestSharedFileDescriptorFactory {
     FileOutputStream outStream = new FileOutputStream(inStream.getFD());
     outStream.write(101);
     inStream.getChannel().position(0);
-    Assert.assertEquals(101, inStream.read());
+    assertEquals(101, inStream.read());
     inStream.close();
     outStream.close();
     FileUtil.fullyDelete(path);
@@ -69,10 +74,11 @@ public class TestSharedFileDescriptorFactory {
     fos.close();
   }
   
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testCleanupRemainders() throws Exception {
-    Assume.assumeTrue(NativeIO.isAvailable());
-    Assume.assumeTrue(SystemUtils.IS_OS_UNIX);
+    assumeTrue(NativeIO.isAvailable());
+    assumeTrue(SystemUtils.IS_OS_UNIX);
     File path = new File(TEST_BASE, "testCleanupRemainders");
     path.mkdirs();
     String remainder1 = path.getAbsolutePath() + 
@@ -85,12 +91,13 @@ public class TestSharedFileDescriptorFactory {
         new String[] { path.getAbsolutePath() });
     // creating the SharedFileDescriptorFactory should have removed 
     // the remainders
-    Assert.assertFalse(new File(remainder1).exists());
-    Assert.assertFalse(new File(remainder2).exists());
+    assertFalse(new File(remainder1).exists());
+    assertFalse(new File(remainder2).exists());
     FileUtil.fullyDelete(path);
   }
   
-  @Test(timeout=60000)
+  @Test
+  @Timeout(value = 60)
   public void testDirectoryFallbacks() throws Exception {
     File nonExistentPath = new File(TEST_BASE, "nonexistent");
     File permissionDeniedPath = new File("/");
@@ -100,7 +107,7 @@ public class TestSharedFileDescriptorFactory {
       SharedFileDescriptorFactory.create("shm_", 
           new String[] { nonExistentPath.getAbsolutePath(),
                           permissionDeniedPath.getAbsolutePath() });
-      Assert.fail();
+      fail();
     } catch (IOException e) {
     }
     SharedFileDescriptorFactory factory =
@@ -108,7 +115,7 @@ public class TestSharedFileDescriptorFactory {
             new String[] { nonExistentPath.getAbsolutePath(),
                             permissionDeniedPath.getAbsolutePath(),
                             goodPath.getAbsolutePath() } );
-    Assert.assertEquals(goodPath.getAbsolutePath(), factory.getPath());
+    assertEquals(goodPath.getAbsolutePath(), factory.getPath());
     FileUtil.fullyDelete(goodPath);
   }
 }
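TestSharedFileDescriptorFactory combines two recurring migration patterns: @Before becomes @BeforeEach, and Assume.assumeTrue(message, condition) becomes Assumptions.assumeTrue(condition, message), with the message moved last. A minimal sketch of a setup guard that skips a whole class when a precondition fails; the class, method, and helper names are hypothetical:

import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

// Illustrative only; not part of the patch.
public class SetupGuardExample {

  @BeforeEach  // JUnit 4 equivalent: @Before
  public void setup() {
    // A failed assumption aborts (skips) the test rather than failing it,
    // so every test in the class is skipped when the guard is false.
    assumeTrue(nativeSupportAvailable(), "native support not available");
  }

  @Test
  public void runsOnlyWhenSupported() {
    assertTrue(true);
  }

  private static boolean nativeSupportAvailable() {
    return true;  // placeholder for a real capability check
  }
}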

+ 18 - 22
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestConnectionRetryPolicy.java

@@ -18,14 +18,16 @@
 
 package org.apache.hadoop.io.retry;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.RpcNoSuchMethodException;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 /**
  * This class mainly tests behaviors of various retry policies in connection
@@ -67,7 +69,8 @@ public class TestConnectionRetryPolicy {
         "");
   }
 
-  @Test(timeout = 60000)
+  @Test
+  @Timeout(value = 60)
   public void testDefaultRetryPolicyEquivalence() {
     RetryPolicy rp1 = null;
     RetryPolicy rp2 = null;
@@ -103,34 +106,29 @@ public class TestConnectionRetryPolicy {
     /* test enabled and different specifications */
     rp1 = getDefaultRetryPolicy(true, "20000,3");
     rp2 = getDefaultRetryPolicy(true, "30000,4");
-    assertNotEquals("should not be equal", rp1, rp2);
-    assertNotEquals(
-        "should not have the same hash code",
-        rp1.hashCode(),
-        rp2.hashCode());
+    assertNotEquals(rp1, rp2, "should not be equal");
+    assertNotEquals(rp1.hashCode(), rp2.hashCode(),
+        "should not have the same hash code");
 
     /* test disabled and the same specifications */
     rp1 = getDefaultRetryPolicy(false, "40000,5");
     rp2 = getDefaultRetryPolicy(false, "40000,5");
-    assertEquals("should be equal", rp1, rp2);
-    assertEquals(
-        "should have the same hash code",
-        rp1, rp2);
+    assertEquals(rp1, rp2, "should be equal");
+    assertEquals(rp1, rp2, "should have the same hash code");
 
     /* test the disabled and different specifications */
     rp1 = getDefaultRetryPolicy(false, "50000,6");
     rp2 = getDefaultRetryPolicy(false, "60000,7");
-    assertEquals("should be equal", rp1, rp2);
-    assertEquals(
-        "should have the same hash code",
-        rp1, rp2);
+    assertEquals(rp1, rp2, "should be equal");
+    assertEquals(rp1, rp2, "should have the same hash code");
   }
 
   public static RetryPolicy newTryOnceThenFail() {
     return new RetryPolicies.TryOnceThenFail();
   }
 
-  @Test(timeout = 60000)
+  @Test
+  @Timeout(value = 60)
   public void testTryOnceThenFailEquivalence() throws Exception {
     final RetryPolicy rp1 = newTryOnceThenFail();
     final RetryPolicy rp2 = newTryOnceThenFail();
@@ -142,11 +140,9 @@ public class TestConnectionRetryPolicy {
     for (int i = 0; i < polices.length; i++) {
       for (int j = 0; j < polices.length; j++) {
         if (i != j) {
-          assertEquals("should be equal", polices[i], polices[j]);
-          assertEquals(
-              "should have the same hash code",
-              polices[i].hashCode(),
-              polices[j].hashCode());
+          assertEquals(polices[i], polices[j], "should be equal");
+          assertEquals(polices[i].hashCode(),
+              polices[j].hashCode(), "should have the same hash code");
         }
       }
     }

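The retry-policy hunks show the core Jupiter assertion change: the failure message moves from the first parameter to the last. Note also that a message claiming hash-code equality should be paired with an actual hash-code comparison, as in the corrected lines above. A compact sketch with illustrative values:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;

import org.junit.jupiter.api.Test;

// Illustrative only; not part of the patch.
public class MessageLastAssertionExample {

  @Test
  public void messageIsTheLastArgument() {
    String a = "same";
    String b = "same";
    // JUnit 4: assertEquals("should be equal", a, b);
    assertEquals(a, b, "should be equal");
    // Compare hash codes when the message claims hash-code equality.
    assertEquals(a.hashCode(), b.hashCode(), "should have the same hash code");
    assertNotEquals("same", "different", "should not be equal");
  }
}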
+ 7 - 10
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java

@@ -21,31 +21,28 @@ package org.apache.hadoop.io.retry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.RetriableException;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 import java.io.IOException;
-import java.util.concurrent.TimeUnit;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * Test the behavior of the default retry policy.
  */
+@Timeout(30)
 public class TestDefaultRetryPolicy {
-  @Rule
-  public Timeout timeout = new Timeout(30000, TimeUnit.MILLISECONDS);
 
   /** Verify FAIL < RETRY < FAILOVER_AND_RETRY. */
   @Test
   public void testRetryDecisionOrdering() throws Exception {
-    Assert.assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo(
+    assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo(
         RetryPolicy.RetryAction.RetryDecision.RETRY) < 0);
-    Assert.assertTrue(RetryPolicy.RetryAction.RetryDecision.RETRY.compareTo(
+    assertTrue(RetryPolicy.RetryAction.RetryDecision.RETRY.compareTo(
         RetryPolicy.RetryAction.RetryDecision.FAILOVER_AND_RETRY) < 0);
-    Assert.assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo(
+    assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo(
         RetryPolicy.RetryAction.RetryDecision.FAILOVER_AND_RETRY) < 0);
   }
 

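TestDefaultRetryPolicy shows the rule-based timeout migration: the JUnit 4 @Rule Timeout field becomes a single class-level @Timeout annotation covering every test method. A minimal sketch under that assumption, with hypothetical names:

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

// Illustrative only; not part of the patch.
// JUnit 4 equivalent: @Rule public Timeout timeout =
//     new Timeout(30000, TimeUnit.MILLISECONDS);
@Timeout(30)  // seconds; applies to each test method in the class
public class ClassLevelTimeoutExample {

  @Test
  public void inheritsTheClassLimit() {
  }

  @Test
  @Timeout(5)  // a method-level annotation overrides the class limit
  public void usesATighterLimit() {
  }
}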
+ 6 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java

@@ -17,7 +17,9 @@
  */
 package org.apache.hadoop.io.retry;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
@@ -26,7 +28,7 @@ import org.apache.hadoop.io.retry.UnreliableImplementation.TypeOfExceptionToFail
 import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException;
 import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.util.ThreadUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestFailoverProxy {
 
@@ -354,8 +356,8 @@ public class TestFailoverProxy {
       unreliable.failsIfIdentifierDoesntMatch("no-such-identifier");
       fail("Should have thrown *some* exception");
     } catch (Exception e) {
-      assertTrue("Expected IOE but got " + e.getClass(),
-          e instanceof IOException);
+      assertTrue(e instanceof IOException,
+          "Expected IOE but got " + e.getClass());
     }
   }
 }
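The patch keeps TestFailoverProxy's try/catch-and-fail idiom, only reordering the assertTrue arguments. For reference, Jupiter also offers assertThrows and (since 5.8) assertInstanceOf, which express the same intent more directly. A hedged sketch of that alternative, not how the patch itself does it; the thrown exception here is a stand-in for the real failing call:

import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;

import org.junit.jupiter.api.Test;

// Illustrative only; not part of the patch.
public class ExpectedExceptionExample {

  @Test
  public void expectsAnIOException() {
    Exception thrown = assertThrows(Exception.class, () -> {
      throw new IOException("no such identifier");  // stand-in for the real call
    });
    // Fails with a descriptive message if the type does not match.
    assertInstanceOf(IOException.class, thrown,
        "Expected IOE but got " + thrown.getClass());
  }
}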

+ 4 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java

@@ -26,8 +26,8 @@ import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.AccessControlException;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -43,7 +43,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import javax.security.sasl.SaslException;
 
 import static org.apache.hadoop.io.retry.RetryPolicies.*;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyBoolean;
 import static org.mockito.ArgumentMatchers.anyInt;
@@ -62,7 +62,7 @@ public class TestRetryProxy {
   private UnreliableImplementation unreliableImpl;
   private RetryAction caughtRetryAction = null;
   
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     unreliableImpl = new UnreliableImplementation();
   }

+ 15 - 15
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java

@@ -19,10 +19,10 @@ package org.apache.hadoop.io.serializer;
 
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertNotNull;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -38,7 +38,7 @@ public class TestSerializationFactory {
   static Configuration conf;
   static SerializationFactory factory;
 
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws Exception {
     conf = new Configuration();
     factory = new SerializationFactory(conf);
@@ -74,21 +74,21 @@ public class TestSerializationFactory {
   @Test
   public void testGetSerializer() {
     // Test that a valid serializer class is returned when it's present
-    assertNotNull("A valid class must be returned for default Writable SerDe",
-        factory.getSerializer(Writable.class));
+    assertNotNull(factory.getSerializer(Writable.class),
+        "A valid class must be returned for default Writable SerDe");
     // Test that a null is returned when none can be found.
-    assertNull("A null should be returned if there are no serializers found.",
-        factory.getSerializer(TestSerializationFactory.class));
+    assertNull(factory.getSerializer(TestSerializationFactory.class),
+        "A null should be returned if there are no serializers found.");
   }
 
   @Test
   public void testGetDeserializer() {
     // Test that a valid deserializer class is returned when it's present
-    assertNotNull("A valid class must be returned for default Writable SerDe",
-        factory.getDeserializer(Writable.class));
+    assertNotNull(factory.getDeserializer(Writable.class),
+        "A valid class must be returned for default Writable SerDe");
     // Test that a null is returned when none can be found.
-    assertNull("A null should be returned if there are no deserializers found",
-        factory.getDeserializer(TestSerializationFactory.class));
+    assertNull(factory.getDeserializer(TestSerializationFactory.class),
+        "A null should be returned if there are no deserializers found");
   }
 
   @Test
@@ -96,7 +96,7 @@ public class TestSerializationFactory {
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, " org.apache.hadoop.io.serializer.WritableSerialization ");
     SerializationFactory factory = new SerializationFactory(conf);
-    assertNotNull("Valid class must be returned",
-     factory.getSerializer(LongWritable.class));
+    assertNotNull(factory.getSerializer(LongWritable.class),
+        "Valid class must be returned");
    }
 }
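TestSerializationFactory demonstrates the one-time-setup migration: @BeforeClass becomes @BeforeAll, and the method must remain static. A minimal sketch with hypothetical names:

import static org.junit.jupiter.api.Assertions.assertNotNull;

import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

// Illustrative only; not part of the patch.
public class BeforeAllMigrationExample {

  private static String shared;

  @BeforeAll  // JUnit 4 equivalent: @BeforeClass; still must be static
  public static void setup() {
    shared = "initialized once for the whole class";
  }

  @Test
  public void seesTheSharedState() {
    assertNotNull(shared, "setup should have run before any test");
  }
}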

+ 3 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java

@@ -31,8 +31,9 @@ import org.apache.hadoop.io.TestGenericWritable.Baz;
 import org.apache.hadoop.io.TestGenericWritable.FooGenericWritable;
 import org.apache.hadoop.io.WritableComparator;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
 
 public class TestWritableSerialization {
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java

@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.io.serializer.avro;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.SerializationTestUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestAvroSerialization {