瀏覽代碼

HADOOP-19427. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-compat-bench. (#7619)

* HADOOP-19427. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-compat-bench.

Co-authored-by: Chris Nauroth <cnauroth@apache.org>
Reviewed-by: Chris Nauroth <cnauroth@apache.org>
Signed-off-by: Shilun Fan <slfan1989@apache.org>
slfan1989 1 周之前
父節點
當前提交
809b1b5973

+ 25 - 0
hadoop-tools/hadoop-compat-bench/pom.xml

@@ -69,6 +69,31 @@
       <artifactId>mockito-inline</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>

   <build>

+ 5 - 4
hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatDefaultSuites.java

@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.fs.compat.common;

+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import org.apache.hadoop.fs.compat.HdfsCompatTool;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatMiniCluster;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatTestCommand;
 import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 public class TestHdfsCompatDefaultSuites {
   @Test
@@ -35,7 +36,7 @@ public class TestHdfsCompatDefaultSuites {
       HdfsCompatCommand cmd = new HdfsCompatTestCommand(uri, "ALL", conf);
       cmd.initialize();
       HdfsCompatReport report = cmd.apply();
-      Assert.assertEquals(0, report.getFailedCase().size());
+      assertEquals(0, report.getFailedCase().size());
       new HdfsCompatTool(conf).printReport(report, System.out);
     } finally {
       cluster.shutdown();
@@ -52,7 +53,7 @@ public class TestHdfsCompatDefaultSuites {
       HdfsCompatCommand cmd = new HdfsCompatTestCommand(uri, "TPCDS", conf);
       cmd.initialize();
       HdfsCompatReport report = cmd.apply();
-      Assert.assertEquals(0, report.getFailedCase().size());
+      assertEquals(0, report.getFailedCase().size());
       new HdfsCompatTool(conf).printReport(report, System.out);
     } finally {
       cluster.shutdown();

+ 8 - 8
hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatFsCommand.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.compat.common;

+import static org.junit.jupiter.api.Assertions.assertEquals;

 import org.apache.hadoop.fs.compat.HdfsCompatTool;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatMiniCluster;
@@ -25,8 +26,7 @@ import org.apache.hadoop.fs.compat.cases.HdfsCompatAclTestCases;
 import org.apache.hadoop.fs.compat.cases.HdfsCompatMkdirTestCases;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 import java.io.IOException;
 import java.lang.reflect.Field;
@@ -47,8 +47,8 @@ public class TestHdfsCompatFsCommand {
       HdfsCompatCommand cmd = new TestCommand(uri, suite, conf);
       cmd.initialize();
       HdfsCompatReport report = cmd.apply();
-      Assert.assertEquals(7, report.getPassedCase().size());
-      Assert.assertEquals(0, report.getFailedCase().size());
+      assertEquals(7, report.getPassedCase().size());
+      assertEquals(0, report.getFailedCase().size());
       show(conf, report);
     } finally {
       if (cluster != null) {
@@ -65,8 +65,8 @@ public class TestHdfsCompatFsCommand {
     HdfsCompatCommand cmd = new TestCommand(uri, suite, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(1, report.getPassedCase().size());
-    Assert.assertEquals(6, report.getFailedCase().size());
+    assertEquals(1, report.getPassedCase().size());
+    assertEquals(6, report.getFailedCase().size());
     show(conf, report);
     cleanup(cmd, conf);
   }
@@ -79,8 +79,8 @@ public class TestHdfsCompatFsCommand {
     HdfsCompatCommand cmd = new TestCommand(uri, suite, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(0, report.getPassedCase().size());
-    Assert.assertEquals(6, report.getFailedCase().size());
+    assertEquals(0, report.getPassedCase().size());
+    assertEquals(6, report.getFailedCase().size());
     show(conf, report);
     cleanup(cmd, conf);
   }

+ 6 - 6
hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatInterfaceCoverage.java

@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.fs.compat.common;

+import static org.junit.jupiter.api.Assertions.assertTrue;

 import org.apache.hadoop.fs.compat.cases.HdfsCompatBasics;
 import org.apache.hadoop.fs.FileSystem;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Disabled;

 import java.lang.reflect.Method;
 import java.util.HashSet;
@@ -30,13 +30,13 @@ import java.util.Set;

 public class TestHdfsCompatInterfaceCoverage {
   @Test
-  @Ignore
+  @Disabled
   public void testFsCompatibility() {
     Set<String> publicMethods = getPublicInterfaces(FileSystem.class);
     Set<String> targets = getTargets(HdfsCompatBasics.class);
     for (String publicMethod : publicMethods) {
-      Assert.assertTrue("Method not tested: " + publicMethod,
-          targets.contains(publicMethod));
+      assertTrue(targets.contains(publicMethod),
+          "Method not tested: " + publicMethod);
     }
   }


+ 10 - 10
hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatShellCommand.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.compat.common;

+import static org.junit.jupiter.api.Assertions.assertEquals;

 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.compat.HdfsCompatTool;
@@ -24,10 +25,9 @@ import org.apache.hadoop.fs.compat.hdfs.HdfsCompatMiniCluster;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatTestCommand;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatTestShellScope;
 import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;

 import java.io.File;
 import java.io.IOException;
@@ -36,13 +36,13 @@ import java.nio.file.Files;
 public class TestHdfsCompatShellCommand {
   private HdfsCompatMiniCluster cluster;

-  @Before
+  @BeforeEach
   public void runCluster() throws IOException {
     this.cluster = new HdfsCompatMiniCluster();
     this.cluster.start();
   }

-  @After
+  @AfterEach
   public void shutdownCluster() {
     this.cluster.shutdown();
     this.cluster = null;
@@ -55,8 +55,8 @@ public class TestHdfsCompatShellCommand {
     HdfsCompatCommand cmd = new TestCommand(uri, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(3, report.getPassedCase().size());
-    Assert.assertEquals(0, report.getFailedCase().size());
+    assertEquals(3, report.getPassedCase().size());
+    assertEquals(0, report.getFailedCase().size());
     show(conf, report);
   }

@@ -67,8 +67,8 @@ public class TestHdfsCompatShellCommand {
     HdfsCompatCommand cmd = new TestSkipCommand(uri, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(2, report.getPassedCase().size());
-    Assert.assertEquals(0, report.getFailedCase().size());
+    assertEquals(2, report.getPassedCase().size());
+    assertEquals(0, report.getFailedCase().size());
     show(conf, report);
   }