
HDFS-3583. Convert remaining tests to Junit4. Contributed by Andrew Wang.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1362753 13f79535-47bb-0310-9956-ffa450edef68
Aaron Myers 13 years ago
parent
commit
e2253b539e
100 changed files with 1162 additions and 928 deletions
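
Most hunks in this commit apply the same mechanical pattern: drop the deprecated junit.framework.Assert import in favor of static imports from org.junit.Assert, strip the "Assert." prefix at call sites, and regroup imports so static and java.* imports precede the org.* block. A minimal sketch of the resulting JUnit 4 style (the class and test names here are hypothetical, not taken from the commit):

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.junit.Test;

// Hypothetical example; mirrors the post-conversion style of the diffs below.
public class TestJUnit4Style {

  @Test  // JUnit 4 discovers tests by annotation, not by a "test" name prefix
  public void conversionPattern() {
    // JUnit 3 form: Assert.assertEquals(2, 1 + 1);
    assertEquals(2, 1 + 1);            // static import, no Assert. prefix
    assertTrue("sanity check", 2 > 1);
  }
}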
  1. 12 12
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
  2. 2 2
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java
  3. 4 4
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java
  4. 27 25
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
  5. 11 9
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestRunnableCallable.java
  6. 15 13
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestXException.java
  7. 11 8
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestBaseService.java
  8. 138 132
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java
  9. 3 3
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServerConstructor.java
  10. 23 20
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/hadoop/TestFileSystemAccessService.java
  11. 94 90
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/instrumentation/TestInstrumentationService.java
  12. 5 4
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/scheduler/TestSchedulerService.java
  13. 3 3
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/DummyGroupMapping.java
  14. 8 6
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestGroupsService.java
  15. 14 13
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestProxyUserService.java
  16. 13 10
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java
  17. 31 27
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestMDCFilter.java
  18. 8 7
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java
  19. 13 12
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java
  20. 29 27
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java
  21. 5 4
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java
  22. 10 7
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java
  23. 10 7
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java
  24. 15 13
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestParam.java
  25. 19 15
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java
  26. 4 4
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java
  27. 3 3
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java
  28. 5 5
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
  29. 7 6
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java
  30. 28 25
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java
  31. 23 21
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java
  32. 3 3
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
  33. 5 5
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
  34. 9 3
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/hdfs/TestRaidDfs.java
  35. 17 17
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixer.java
  36. 0 1
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerBlockFixDist.java
  37. 7 7
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerDistConcurrency.java
  38. 0 1
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerGeneratedBlockDist.java
  39. 0 1
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerParityBlockFixDist.java
  40. 11 6
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestDirectoryTraversal.java
  41. 9 3
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestErasureCodes.java
  42. 13 3
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestGaloisField.java
  43. 12 5
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestHarIndexParser.java
  44. 8 7
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidFilter.java
  45. 10 8
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidHar.java
  46. 15 10
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidNode.java
  47. 16 23
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidPurge.java
  48. 14 12
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidShell.java
  49. 12 15
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidShellFsck.java
  50. 10 7
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestReedSolomonDecoder.java
  51. 6 16
      hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestReedSolomonEncoder.java
  52. 2 0
      hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
  53. 5 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
  54. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
  55. 17 10
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
  56. 4 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java
  57. 0 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestResolveHdfsSymlink.java
  58. 7 6
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestUrlStreamHandler.java
  59. 2 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/loadGenerator/TestLoadGenerator.java
  60. 10 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/permission/TestStickyBit.java
  61. 16 24
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsDefaultValue.java
  62. 3 2
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsFileStatusHdfs.java
  63. 9 10
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/AppendTestUtil.java
  64. 0 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BenchmarkThroughput.java
  65. 10 9
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java
  66. 0 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
  67. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/FileAppendTest4.java
  68. 5 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAbandonBlock.java
  69. 6 6
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java
  70. 8 9
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockMissingException.java
  71. 5 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java
  72. 6 7
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientBlockVerification.java
  73. 1 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientProtocolForPipelineRecovery.java
  74. 10 14
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestConnCache.java
  75. 6 4
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestCrcCorruption.java
  76. 12 7
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSAddressConfig.java
  77. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientExcludedNodes.java
  78. 16 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
  79. 10 5
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSFinalize.java
  80. 11 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java
  81. 15 8
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSPermission.java
  82. 5 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRemove.java
  83. 6 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRename.java
  84. 10 6
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
  85. 22 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
  86. 5 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShellGenericOptions.java
  87. 10 7
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStartupVersions.java
  88. 21 11
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStorageStateRecovery.java
  89. 8 7
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
  90. 16 5
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java
  91. 24 12
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
  92. 4 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
  93. 11 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java
  94. 10 4
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeDeath.java
  95. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeRegistration.java
  96. 7 4
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java
  97. 7 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDefaultNameNodePort.java
  98. 5 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDeprecatedKeys.java
  99. 7 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSInputChecker.java
  100. 9 5
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java

+ 12 - 12
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java

@@ -18,6 +18,18 @@
 
 package org.apache.hadoop.fs.http.client;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.Collection;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.ContentSummary;
@@ -44,18 +56,6 @@ import org.junit.runners.Parameterized;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.webapp.WebAppContext;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.net.URL;
-import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
-import java.util.Collection;
-
 @RunWith(value = Parameterized.class)
 public class TestHttpFSFileSystem extends HFSTestCase {
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java

@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.fs.http.client;
 
+import java.net.URI;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
@@ -26,8 +28,6 @@ import org.junit.Assert;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
-import java.net.URI;
-
 @RunWith(value = Parameterized.class)
 public class TestWebhdfsFileSystem extends TestHttpFSFileSystem {
 

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java

@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.fs.http.server;
 
-import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
-import org.junit.Test;
-import org.mockito.Mockito;
-
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.junit.Test;
+import org.mockito.Mockito;
+
 public class TestCheckUploadContentTypeFilter {
 
   @Test

+ 27 - 25
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java

@@ -18,7 +18,23 @@
 
 package org.apache.hadoop.fs.http.server;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
@@ -38,20 +54,6 @@ import org.junit.Test;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.webapp.WebAppContext;
 
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.text.MessageFormat;
-import java.util.Arrays;
-import java.util.List;
-
 public class TestHttpFSServer extends HFSTestCase {
 
   @Test
@@ -103,9 +105,9 @@ public class TestHttpFSServer extends HFSTestCase {
   }
   private void createHttpFSServer() throws Exception {
     File homeDir = TestDirHelper.getTestDir();
-    Assert.assertTrue(new File(homeDir, "conf").mkdir());
-    Assert.assertTrue(new File(homeDir, "log").mkdir());
-    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    assertTrue(new File(homeDir, "conf").mkdir());
+    assertTrue(new File(homeDir, "log").mkdir());
+    assertTrue(new File(homeDir, "temp").mkdir());
     HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
 
     File secretFile = new File(new File(homeDir, "conf"), "secret");
@@ -157,23 +159,23 @@ public class TestHttpFSServer extends HFSTestCase {
     URL url = new URL(TestJettyHelper.getJettyURL(),
                       MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation", "nobody"));
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
 
     url = new URL(TestJettyHelper.getJettyURL(),
                   MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
                                        HadoopUsersConfTestHelper.getHadoopUsers()[0]));
     conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
     String line = reader.readLine();
     reader.close();
-    Assert.assertTrue(line.contains("\"counters\":{"));
+    assertTrue(line.contains("\"counters\":{"));
 
     url = new URL(TestJettyHelper.getJettyURL(),
                   MessageFormat.format("/webhdfs/v1/foo?user.name={0}&op=instrumentation",
                                        HadoopUsersConfTestHelper.getHadoopUsers()[0]));
     conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
   }
 
   @Test
@@ -187,7 +189,7 @@ public class TestHttpFSServer extends HFSTestCase {
     URL url = new URL(TestJettyHelper.getJettyURL(),
                       MessageFormat.format("/webhdfs/v1/?user.name={0}&op=liststatus", user));
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
     reader.readLine();
     reader.close();
@@ -208,7 +210,7 @@ public class TestHttpFSServer extends HFSTestCase {
     URL url = new URL(TestJettyHelper.getJettyURL(),
                       MessageFormat.format("/webhdfs/v1/tmp?user.name={0}&op=liststatus&filter=f*", user));
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
     reader.readLine();
     reader.close();
@@ -228,7 +230,7 @@ public class TestHttpFSServer extends HFSTestCase {
     conn.setDoInput(true);
     conn.setDoOutput(true);
     conn.setRequestMethod("PUT");
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
   }
 
 }

+ 11 - 9
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestRunnableCallable.java

@@ -19,12 +19,14 @@
 package org.apache.hadoop.lib.lang;
 
 
-import junit.framework.Assert;
-import org.apache.hadoop.test.HTestCase;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.util.concurrent.Callable;
 
+import org.apache.hadoop.test.HTestCase;
+import org.junit.Test;
+
 public class TestRunnableCallable extends HTestCase {
 
   public static class R implements Runnable {
@@ -59,14 +61,14 @@ public class TestRunnableCallable extends HTestCase {
     R r = new R();
     RunnableCallable rc = new RunnableCallable(r);
     rc.run();
-    Assert.assertTrue(r.RUN);
+    assertTrue(r.RUN);
 
     r = new R();
     rc = new RunnableCallable(r);
     rc.call();
-    Assert.assertTrue(r.RUN);
+    assertTrue(r.RUN);
 
-    Assert.assertEquals(rc.toString(), "R");
+    assertEquals(rc.toString(), "R");
   }
 
   @Test
@@ -74,14 +76,14 @@ public class TestRunnableCallable extends HTestCase {
     C c = new C();
     RunnableCallable rc = new RunnableCallable(c);
     rc.run();
-    Assert.assertTrue(c.RUN);
+    assertTrue(c.RUN);
 
     c = new C();
     rc = new RunnableCallable(c);
     rc.call();
-    Assert.assertTrue(c.RUN);
+    assertTrue(c.RUN);
 
-    Assert.assertEquals(rc.toString(), "C");
+    assertEquals(rc.toString(), "C");
   }
 
   @Test(expected = RuntimeException.class)
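
The @Test(expected = RuntimeException.class) context line above is JUnit 4's declarative replacement for the manual try/fail/catch idiom that other files touched by this commit (e.g. TestServer, TestFileSystemAccessService) still use. A short illustrative sketch of the two forms (class and method names are hypothetical, not part of the commit):

import static org.junit.Assert.fail;

import org.junit.Test;

// Hypothetical illustration of both exception-testing styles.
public class TestExpectedException {

  private void boom() {
    throw new RuntimeException("boom");
  }

  // JUnit 4 declarative form: the test passes only if the exception is thrown.
  @Test(expected = RuntimeException.class)
  public void declarative() {
    boom();
  }

  // Manual JUnit 3-era equivalent, still present elsewhere in this commit.
  @Test
  public void manual() {
    try {
      boom();
      fail("expected RuntimeException");
    } catch (RuntimeException expected) {
      // expected path: swallow and pass
    }
  }
}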

+ 15 - 13
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestXException.java

@@ -19,7 +19,9 @@
 package org.apache.hadoop.lib.lang;
 
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
 import org.apache.hadoop.test.HTestCase;
 import org.junit.Test;
 
@@ -37,26 +39,26 @@ public class TestXException extends HTestCase {
   @Test
   public void testXException() throws Exception {
     XException ex = new XException(TestERROR.TC);
-    Assert.assertEquals(ex.getError(), TestERROR.TC);
-    Assert.assertEquals(ex.getMessage(), "TC: {0}");
-    Assert.assertNull(ex.getCause());
+    assertEquals(ex.getError(), TestERROR.TC);
+    assertEquals(ex.getMessage(), "TC: {0}");
+    assertNull(ex.getCause());
 
     ex = new XException(TestERROR.TC, "msg");
-    Assert.assertEquals(ex.getError(), TestERROR.TC);
-    Assert.assertEquals(ex.getMessage(), "TC: msg");
-    Assert.assertNull(ex.getCause());
+    assertEquals(ex.getError(), TestERROR.TC);
+    assertEquals(ex.getMessage(), "TC: msg");
+    assertNull(ex.getCause());
 
     Exception cause = new Exception();
     ex = new XException(TestERROR.TC, cause);
-    Assert.assertEquals(ex.getError(), TestERROR.TC);
-    Assert.assertEquals(ex.getMessage(), "TC: " + cause.toString());
-    Assert.assertEquals(ex.getCause(), cause);
+    assertEquals(ex.getError(), TestERROR.TC);
+    assertEquals(ex.getMessage(), "TC: " + cause.toString());
+    assertEquals(ex.getCause(), cause);
 
     XException xcause = ex;
     ex = new XException(xcause);
-    Assert.assertEquals(ex.getError(), TestERROR.TC);
-    Assert.assertEquals(ex.getMessage(), xcause.getMessage());
-    Assert.assertEquals(ex.getCause(), xcause);
+    assertEquals(ex.getError(), TestERROR.TC);
+    assertEquals(ex.getMessage(), xcause.getMessage());
+    assertEquals(ex.getCause(), xcause);
   }
 
 }

+ 11 - 8
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestBaseService.java

@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.lib.server;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.HTestCase;
 import org.junit.Test;
@@ -47,9 +50,9 @@ public class TestBaseService extends HTestCase {
   @Test
   public void baseService() throws Exception {
     BaseService service = new MyService();
-    Assert.assertNull(service.getInterface());
-    Assert.assertEquals(service.getPrefix(), "myservice");
-    Assert.assertEquals(service.getServiceDependencies().length, 0);
+    assertNull(service.getInterface());
+    assertEquals(service.getPrefix(), "myservice");
+    assertEquals(service.getServiceDependencies().length, 0);
 
     Server server = Mockito.mock(Server.class);
     Configuration conf = new Configuration(false);
@@ -60,9 +63,9 @@ public class TestBaseService extends HTestCase {
     Mockito.when(server.getPrefixedName("myservice.")).thenReturn("server.myservice.");
 
     service.init(server);
-    Assert.assertEquals(service.getPrefixedName("foo"), "server.myservice.foo");
-    Assert.assertEquals(service.getServiceConfig().size(), 1);
-    Assert.assertEquals(service.getServiceConfig().get("foo"), "FOO");
-    Assert.assertTrue(MyService.INIT);
+    assertEquals(service.getPrefixedName("foo"), "server.myservice.foo");
+    assertEquals(service.getServiceConfig().size(), 1);
+    assertEquals(service.getServiceConfig().get("foo"), "FOO");
+    assertTrue(MyService.INIT);
   }
 }

+ 138 - 132
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java

@@ -18,16 +18,12 @@
 
 package org.apache.hadoop.lib.server;
 
-import junit.framework.Assert;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.lib.lang.XException;
-import org.apache.hadoop.test.HTestCase;
-import org.apache.hadoop.test.TestDir;
-import org.apache.hadoop.test.TestDirHelper;
-import org.apache.hadoop.test.TestException;
-import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -39,50 +35,60 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.lib.lang.XException;
+import org.apache.hadoop.test.HTestCase;
+import org.apache.hadoop.test.TestDir;
+import org.apache.hadoop.test.TestDirHelper;
+import org.apache.hadoop.test.TestException;
+import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+
 public class TestServer extends HTestCase {
 
   @Test
   @TestDir
   public void constructorsGetters() throws Exception {
     Server server = new Server("server", "/a", "/b", "/c", "/d", new Configuration(false));
-    Assert.assertEquals(server.getHomeDir(), "/a");
-    Assert.assertEquals(server.getConfigDir(), "/b");
-    Assert.assertEquals(server.getLogDir(), "/c");
-    Assert.assertEquals(server.getTempDir(), "/d");
-    Assert.assertEquals(server.getName(), "server");
-    Assert.assertEquals(server.getPrefix(), "server");
-    Assert.assertEquals(server.getPrefixedName("name"), "server.name");
-    Assert.assertNotNull(server.getConfig());
+    assertEquals(server.getHomeDir(), "/a");
+    assertEquals(server.getConfigDir(), "/b");
+    assertEquals(server.getLogDir(), "/c");
+    assertEquals(server.getTempDir(), "/d");
+    assertEquals(server.getName(), "server");
+    assertEquals(server.getPrefix(), "server");
+    assertEquals(server.getPrefixedName("name"), "server.name");
+    assertNotNull(server.getConfig());
 
     server = new Server("server", "/a", "/b", "/c", "/d");
-    Assert.assertEquals(server.getHomeDir(), "/a");
-    Assert.assertEquals(server.getConfigDir(), "/b");
-    Assert.assertEquals(server.getLogDir(), "/c");
-    Assert.assertEquals(server.getTempDir(), "/d");
-    Assert.assertEquals(server.getName(), "server");
-    Assert.assertEquals(server.getPrefix(), "server");
-    Assert.assertEquals(server.getPrefixedName("name"), "server.name");
-    Assert.assertNull(server.getConfig());
+    assertEquals(server.getHomeDir(), "/a");
+    assertEquals(server.getConfigDir(), "/b");
+    assertEquals(server.getLogDir(), "/c");
+    assertEquals(server.getTempDir(), "/d");
+    assertEquals(server.getName(), "server");
+    assertEquals(server.getPrefix(), "server");
+    assertEquals(server.getPrefixedName("name"), "server.name");
+    assertNull(server.getConfig());
 
     server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false));
-    Assert.assertEquals(server.getHomeDir(), TestDirHelper.getTestDir().getAbsolutePath());
-    Assert.assertEquals(server.getConfigDir(), TestDirHelper.getTestDir() + "/conf");
-    Assert.assertEquals(server.getLogDir(), TestDirHelper.getTestDir() + "/log");
-    Assert.assertEquals(server.getTempDir(), TestDirHelper.getTestDir() + "/temp");
-    Assert.assertEquals(server.getName(), "server");
-    Assert.assertEquals(server.getPrefix(), "server");
-    Assert.assertEquals(server.getPrefixedName("name"), "server.name");
-    Assert.assertNotNull(server.getConfig());
+    assertEquals(server.getHomeDir(), TestDirHelper.getTestDir().getAbsolutePath());
+    assertEquals(server.getConfigDir(), TestDirHelper.getTestDir() + "/conf");
+    assertEquals(server.getLogDir(), TestDirHelper.getTestDir() + "/log");
+    assertEquals(server.getTempDir(), TestDirHelper.getTestDir() + "/temp");
+    assertEquals(server.getName(), "server");
+    assertEquals(server.getPrefix(), "server");
+    assertEquals(server.getPrefixedName("name"), "server.name");
+    assertNotNull(server.getConfig());
 
     server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath());
-    Assert.assertEquals(server.getHomeDir(), TestDirHelper.getTestDir().getAbsolutePath());
-    Assert.assertEquals(server.getConfigDir(), TestDirHelper.getTestDir() + "/conf");
-    Assert.assertEquals(server.getLogDir(), TestDirHelper.getTestDir() + "/log");
-    Assert.assertEquals(server.getTempDir(), TestDirHelper.getTestDir() + "/temp");
-    Assert.assertEquals(server.getName(), "server");
-    Assert.assertEquals(server.getPrefix(), "server");
-    Assert.assertEquals(server.getPrefixedName("name"), "server.name");
-    Assert.assertNull(server.getConfig());
+    assertEquals(server.getHomeDir(), TestDirHelper.getTestDir().getAbsolutePath());
+    assertEquals(server.getConfigDir(), TestDirHelper.getTestDir() + "/conf");
+    assertEquals(server.getLogDir(), TestDirHelper.getTestDir() + "/log");
+    assertEquals(server.getTempDir(), TestDirHelper.getTestDir() + "/temp");
+    assertEquals(server.getName(), "server");
+    assertEquals(server.getPrefix(), "server");
+    assertEquals(server.getPrefixedName("name"), "server.name");
+    assertNull(server.getConfig());
   }
 
   @Test
@@ -113,9 +119,9 @@ public class TestServer extends HTestCase {
   @TestDir
   public void initNoConfigDir() throws Exception {
     File homeDir = new File(TestDirHelper.getTestDir(), "home");
-    Assert.assertTrue(homeDir.mkdir());
-    Assert.assertTrue(new File(homeDir, "log").mkdir());
-    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    assertTrue(homeDir.mkdir());
+    assertTrue(new File(homeDir, "log").mkdir());
+    assertTrue(new File(homeDir, "temp").mkdir());
     Configuration conf = new Configuration(false);
     conf.set("server.services", TestService.class.getName());
     Server server = new Server("server", homeDir.getAbsolutePath(), conf);
@@ -127,9 +133,9 @@ public class TestServer extends HTestCase {
   @TestDir
   public void initConfigDirNotDir() throws Exception {
     File homeDir = new File(TestDirHelper.getTestDir(), "home");
-    Assert.assertTrue(homeDir.mkdir());
-    Assert.assertTrue(new File(homeDir, "log").mkdir());
-    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    assertTrue(homeDir.mkdir());
+    assertTrue(new File(homeDir, "log").mkdir());
+    assertTrue(new File(homeDir, "temp").mkdir());
     File configDir = new File(homeDir, "conf");
     new FileOutputStream(configDir).close();
     Configuration conf = new Configuration(false);
@@ -143,9 +149,9 @@ public class TestServer extends HTestCase {
   @TestDir
   public void initNoLogDir() throws Exception {
     File homeDir = new File(TestDirHelper.getTestDir(), "home");
-    Assert.assertTrue(homeDir.mkdir());
-    Assert.assertTrue(new File(homeDir, "conf").mkdir());
-    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    assertTrue(homeDir.mkdir());
+    assertTrue(new File(homeDir, "conf").mkdir());
+    assertTrue(new File(homeDir, "temp").mkdir());
     Configuration conf = new Configuration(false);
     conf.set("server.services", TestService.class.getName());
     Server server = new Server("server", homeDir.getAbsolutePath(), conf);
@@ -157,9 +163,9 @@ public class TestServer extends HTestCase {
   @TestDir
   public void initLogDirNotDir() throws Exception {
     File homeDir = new File(TestDirHelper.getTestDir(), "home");
-    Assert.assertTrue(homeDir.mkdir());
-    Assert.assertTrue(new File(homeDir, "conf").mkdir());
-    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    assertTrue(homeDir.mkdir());
+    assertTrue(new File(homeDir, "conf").mkdir());
+    assertTrue(new File(homeDir, "temp").mkdir());
     File logDir = new File(homeDir, "log");
     new FileOutputStream(logDir).close();
     Configuration conf = new Configuration(false);
@@ -173,9 +179,9 @@ public class TestServer extends HTestCase {
   @TestDir
   public void initNoTempDir() throws Exception {
     File homeDir = new File(TestDirHelper.getTestDir(), "home");
-    Assert.assertTrue(homeDir.mkdir());
-    Assert.assertTrue(new File(homeDir, "conf").mkdir());
-    Assert.assertTrue(new File(homeDir, "log").mkdir());
+    assertTrue(homeDir.mkdir());
+    assertTrue(new File(homeDir, "conf").mkdir());
+    assertTrue(new File(homeDir, "log").mkdir());
     Configuration conf = new Configuration(false);
     conf.set("server.services", TestService.class.getName());
     Server server = new Server("server", homeDir.getAbsolutePath(), conf);
@@ -187,9 +193,9 @@ public class TestServer extends HTestCase {
   @TestDir
   public void initTempDirNotDir() throws Exception {
     File homeDir = new File(TestDirHelper.getTestDir(), "home");
-    Assert.assertTrue(homeDir.mkdir());
-    Assert.assertTrue(new File(homeDir, "conf").mkdir());
-    Assert.assertTrue(new File(homeDir, "log").mkdir());
+    assertTrue(homeDir.mkdir());
+    assertTrue(new File(homeDir, "conf").mkdir());
+    assertTrue(new File(homeDir, "log").mkdir());
     File tempDir = new File(homeDir, "temp");
     new FileOutputStream(tempDir).close();
     Configuration conf = new Configuration(false);
@@ -204,7 +210,7 @@ public class TestServer extends HTestCase {
   public void siteFileNotAFile() throws Exception {
     String homeDir = TestDirHelper.getTestDir().getAbsolutePath();
     File siteFile = new File(homeDir, "server-site.xml");
-    Assert.assertTrue(siteFile.mkdir());
+    assertTrue(siteFile.mkdir());
     Server server = new Server("server", homeDir, homeDir, homeDir, homeDir);
     server.init();
   }
@@ -234,12 +240,12 @@ public class TestServer extends HTestCase {
 
     @Override
     protected void init() throws ServiceException {
-      Assert.assertEquals(getServer().getStatus(), Server.Status.BOOTING);
+      assertEquals(getServer().getStatus(), Server.Status.BOOTING);
     }
 
     @Override
     public void destroy() {
-      Assert.assertEquals(getServer().getStatus(), Server.Status.SHUTTING_DOWN);
+      assertEquals(getServer().getStatus(), Server.Status.SHUTTING_DOWN);
       super.destroy();
     }
 
@@ -255,12 +261,12 @@ public class TestServer extends HTestCase {
     Configuration conf = new Configuration(false);
     conf.set("server.services", LifeCycleService.class.getName());
     Server server = createServer(conf);
-    Assert.assertEquals(server.getStatus(), Server.Status.UNDEF);
+    assertEquals(server.getStatus(), Server.Status.UNDEF);
     server.init();
-    Assert.assertNotNull(server.get(LifeCycleService.class));
-    Assert.assertEquals(server.getStatus(), Server.Status.NORMAL);
+    assertNotNull(server.get(LifeCycleService.class));
+    assertEquals(server.getStatus(), Server.Status.NORMAL);
     server.destroy();
-    Assert.assertEquals(server.getStatus(), Server.Status.SHUTDOWN);
+    assertEquals(server.getStatus(), Server.Status.SHUTDOWN);
   }
 
   @Test
@@ -270,7 +276,7 @@ public class TestServer extends HTestCase {
     conf.set("server.startup.status", "ADMIN");
     Server server = createServer(conf);
     server.init();
-    Assert.assertEquals(server.getStatus(), Server.Status.ADMIN);
+    assertEquals(server.getStatus(), Server.Status.ADMIN);
     server.destroy();
   }
 
@@ -334,7 +340,7 @@ public class TestServer extends HTestCase {
     Server server = createServer(conf);
     server.init();
     server.setStatus(Server.Status.ADMIN);
-    Assert.assertTrue(TestService.LIFECYCLE.contains("serverStatusChange"));
+    assertTrue(TestService.LIFECYCLE.contains("serverStatusChange"));
   }
 
   @Test
@@ -357,7 +363,7 @@ public class TestServer extends HTestCase {
     server.init();
     TestService.LIFECYCLE.clear();
     server.setStatus(server.getStatus());
-    Assert.assertFalse(TestService.LIFECYCLE.contains("serverStatusChange"));
+    assertFalse(TestService.LIFECYCLE.contains("serverStatusChange"));
   }
 
   @Test
@@ -368,9 +374,9 @@ public class TestServer extends HTestCase {
     conf.set("server.services", TestService.class.getName());
     Server server = createServer(conf);
     server.init();
-    Assert.assertNotNull(server.get(TestService.class));
+    assertNotNull(server.get(TestService.class));
     server.destroy();
-    Assert.assertEquals(TestService.LIFECYCLE, Arrays.asList("init", "postInit", "serverStatusChange", "destroy"));
+    assertEquals(TestService.LIFECYCLE, Arrays.asList("init", "postInit", "serverStatusChange", "destroy"));
   }
 
   @Test
@@ -379,7 +385,7 @@ public class TestServer extends HTestCase {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
     Server server = new Server("testserver", dir, dir, dir, dir);
     server.init();
-    Assert.assertEquals(server.getConfig().get("testserver.a"), "default");
+    assertEquals(server.getConfig().get("testserver.a"), "default");
   }
 
   @Test
@@ -392,7 +398,7 @@ public class TestServer extends HTestCase {
     w.close();
     Server server = new Server("testserver", dir, dir, dir, dir);
     server.init();
-    Assert.assertEquals(server.getConfig().get("testserver.a"), "site");
+    assertEquals(server.getConfig().get("testserver.a"), "site");
   }
 
   @Test
@@ -407,7 +413,7 @@ public class TestServer extends HTestCase {
       w.close();
       Server server = new Server("testserver", dir, dir, dir, dir);
       server.init();
-      Assert.assertEquals(server.getConfig().get("testserver.a"), "sysprop");
+      assertEquals(server.getConfig().get("testserver.a"), "sysprop");
     } finally {
       System.getProperties().remove("testserver.a");
     }
@@ -633,7 +639,7 @@ public class TestServer extends HTestCase {
     conf = new Configuration(false);
     server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
-    Assert.assertEquals(ORDER.size(), 0);
+    assertEquals(ORDER.size(), 0);
 
     // 2 services init/destroy
     ORDER.clear();
@@ -643,17 +649,17 @@ public class TestServer extends HTestCase {
     conf.set("server.services", services);
     server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
-    Assert.assertEquals(server.get(MyService1.class).getInterface(), MyService1.class);
-    Assert.assertEquals(server.get(MyService3.class).getInterface(), MyService3.class);
-    Assert.assertEquals(ORDER.size(), 4);
-    Assert.assertEquals(ORDER.get(0), "s1.init");
-    Assert.assertEquals(ORDER.get(1), "s3.init");
-    Assert.assertEquals(ORDER.get(2), "s1.postInit");
-    Assert.assertEquals(ORDER.get(3), "s3.postInit");
+    assertEquals(server.get(MyService1.class).getInterface(), MyService1.class);
+    assertEquals(server.get(MyService3.class).getInterface(), MyService3.class);
+    assertEquals(ORDER.size(), 4);
+    assertEquals(ORDER.get(0), "s1.init");
+    assertEquals(ORDER.get(1), "s3.init");
+    assertEquals(ORDER.get(2), "s1.postInit");
+    assertEquals(ORDER.get(3), "s3.postInit");
     server.destroy();
-    Assert.assertEquals(ORDER.size(), 6);
-    Assert.assertEquals(ORDER.get(4), "s3.destroy");
-    Assert.assertEquals(ORDER.get(5), "s1.destroy");
+    assertEquals(ORDER.size(), 6);
+    assertEquals(ORDER.get(4), "s3.destroy");
+    assertEquals(ORDER.get(5), "s1.destroy");
 
     // 3 services, 2nd one fails on init
     ORDER.clear();
@@ -665,16 +671,16 @@ public class TestServer extends HTestCase {
     server = new Server("server", dir, dir, dir, dir, conf);
     try {
       server.init();
-      Assert.fail();
+      fail();
     } catch (ServerException ex) {
-      Assert.assertEquals(MyService2.class, ex.getError().getClass());
+      assertEquals(MyService2.class, ex.getError().getClass());
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
-    Assert.assertEquals(ORDER.size(), 3);
-    Assert.assertEquals(ORDER.get(0), "s1.init");
-    Assert.assertEquals(ORDER.get(1), "s2.init");
-    Assert.assertEquals(ORDER.get(2), "s1.destroy");
+    assertEquals(ORDER.size(), 3);
+    assertEquals(ORDER.get(0), "s1.init");
+    assertEquals(ORDER.get(1), "s2.init");
+    assertEquals(ORDER.get(2), "s1.destroy");
 
     // 2 services one fails on destroy
     ORDER.clear();
@@ -683,15 +689,15 @@ public class TestServer extends HTestCase {
     conf.set("server.services", services);
     server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
-    Assert.assertEquals(ORDER.size(), 4);
-    Assert.assertEquals(ORDER.get(0), "s1.init");
-    Assert.assertEquals(ORDER.get(1), "s5.init");
-    Assert.assertEquals(ORDER.get(2), "s1.postInit");
-    Assert.assertEquals(ORDER.get(3), "s5.postInit");
+    assertEquals(ORDER.size(), 4);
+    assertEquals(ORDER.get(0), "s1.init");
+    assertEquals(ORDER.get(1), "s5.init");
+    assertEquals(ORDER.get(2), "s1.postInit");
+    assertEquals(ORDER.get(3), "s5.postInit");
     server.destroy();
-    Assert.assertEquals(ORDER.size(), 6);
-    Assert.assertEquals(ORDER.get(4), "s5.destroy");
-    Assert.assertEquals(ORDER.get(5), "s1.destroy");
+    assertEquals(ORDER.size(), 6);
+    assertEquals(ORDER.get(4), "s5.destroy");
+    assertEquals(ORDER.get(5), "s1.destroy");
 
 
     // service override via ext
@@ -705,16 +711,16 @@ public class TestServer extends HTestCase {
     server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
 
-    Assert.assertEquals(server.get(MyService1.class).getClass(), MyService1a.class);
-    Assert.assertEquals(ORDER.size(), 4);
-    Assert.assertEquals(ORDER.get(0), "s1a.init");
-    Assert.assertEquals(ORDER.get(1), "s3.init");
-    Assert.assertEquals(ORDER.get(2), "s1a.postInit");
-    Assert.assertEquals(ORDER.get(3), "s3.postInit");
+    assertEquals(server.get(MyService1.class).getClass(), MyService1a.class);
+    assertEquals(ORDER.size(), 4);
+    assertEquals(ORDER.get(0), "s1a.init");
+    assertEquals(ORDER.get(1), "s3.init");
+    assertEquals(ORDER.get(2), "s1a.postInit");
+    assertEquals(ORDER.get(3), "s3.postInit");
     server.destroy();
-    Assert.assertEquals(ORDER.size(), 6);
-    Assert.assertEquals(ORDER.get(4), "s3.destroy");
-    Assert.assertEquals(ORDER.get(5), "s1a.destroy");
+    assertEquals(ORDER.size(), 6);
+    assertEquals(ORDER.get(4), "s3.destroy");
+    assertEquals(ORDER.get(5), "s1a.destroy");
 
     // service override via setService
     ORDER.clear();
@@ -725,16 +731,16 @@ public class TestServer extends HTestCase {
     server.init();
 
     server.setService(MyService1a.class);
-    Assert.assertEquals(ORDER.size(), 6);
-    Assert.assertEquals(ORDER.get(4), "s1.destroy");
-    Assert.assertEquals(ORDER.get(5), "s1a.init");
+    assertEquals(ORDER.size(), 6);
+    assertEquals(ORDER.get(4), "s1.destroy");
+    assertEquals(ORDER.get(5), "s1a.init");
 
-    Assert.assertEquals(server.get(MyService1.class).getClass(), MyService1a.class);
+    assertEquals(server.get(MyService1.class).getClass(), MyService1a.class);
 
     server.destroy();
-    Assert.assertEquals(ORDER.size(), 8);
-    Assert.assertEquals(ORDER.get(6), "s3.destroy");
-    Assert.assertEquals(ORDER.get(7), "s1a.destroy");
+    assertEquals(ORDER.size(), 8);
+    assertEquals(ORDER.get(6), "s3.destroy");
+    assertEquals(ORDER.get(7), "s1a.destroy");
 
     // service add via setService
     ORDER.clear();
@@ -745,16 +751,16 @@ public class TestServer extends HTestCase {
     server.init();
 
     server.setService(MyService5.class);
-    Assert.assertEquals(ORDER.size(), 5);
-    Assert.assertEquals(ORDER.get(4), "s5.init");
+    assertEquals(ORDER.size(), 5);
+    assertEquals(ORDER.get(4), "s5.init");
 
-    Assert.assertEquals(server.get(MyService5.class).getClass(), MyService5.class);
+    assertEquals(server.get(MyService5.class).getClass(), MyService5.class);
 
     server.destroy();
-    Assert.assertEquals(ORDER.size(), 8);
-    Assert.assertEquals(ORDER.get(5), "s5.destroy");
-    Assert.assertEquals(ORDER.get(6), "s3.destroy");
-    Assert.assertEquals(ORDER.get(7), "s1.destroy");
+    assertEquals(ORDER.size(), 8);
+    assertEquals(ORDER.get(5), "s5.destroy");
+    assertEquals(ORDER.get(6), "s3.destroy");
+    assertEquals(ORDER.get(7), "s1.destroy");
 
     // service add via setService exception
     ORDER.clear();
@@ -765,15 +771,15 @@ public class TestServer extends HTestCase {
     server.init();
     try {
       server.setService(MyService7.class);
-      Assert.fail();
+      fail();
     } catch (ServerException ex) {
-      Assert.assertEquals(ServerException.ERROR.S09, ex.getError());
+      assertEquals(ServerException.ERROR.S09, ex.getError());
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
-    Assert.assertEquals(ORDER.size(), 6);
-    Assert.assertEquals(ORDER.get(4), "s3.destroy");
-    Assert.assertEquals(ORDER.get(5), "s1.destroy");
+    assertEquals(ORDER.size(), 6);
+    assertEquals(ORDER.get(4), "s3.destroy");
+    assertEquals(ORDER.get(5), "s1.destroy");
 
     // service with dependency
     ORDER.clear();
@@ -782,8 +788,8 @@ public class TestServer extends HTestCase {
     conf.set("server.services", services);
     server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
-    Assert.assertEquals(server.get(MyService1.class).getInterface(), MyService1.class);
-    Assert.assertEquals(server.get(MyService6.class).getInterface(), MyService6.class);
+    assertEquals(server.get(MyService1.class).getInterface(), MyService1.class);
+    assertEquals(server.get(MyService6.class).getInterface(), MyService6.class);
     server.destroy();
   }
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServerConstructor.java

@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.lib.server;
 
+import java.util.Arrays;
+import java.util.Collection;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.HTestCase;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
-import java.util.Arrays;
-import java.util.Collection;
-
 @RunWith(value = Parameterized.class)
 public class TestServerConstructor extends HTestCase {
 

+ 23 - 20
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/hadoop/TestFileSystemAccessService.java

@@ -18,7 +18,16 @@
 
 package org.apache.hadoop.lib.service.hadoop;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
@@ -38,12 +47,6 @@ import org.apache.hadoop.util.StringUtils;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.Arrays;
-
 public class TestFileSystemAccessService extends HFSTestCase {
 
   private void createHadoopConf(Configuration hadoopConf) throws Exception {
@@ -71,7 +74,7 @@ public class TestFileSystemAccessService extends HFSTestCase {
     conf.set("server.services", services);
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
-    Assert.assertNotNull(server.get(FileSystemAccess.class));
+    assertNotNull(server.get(FileSystemAccess.class));
     server.destroy();
   }
 
@@ -148,7 +151,7 @@ public class TestFileSystemAccessService extends HFSTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
-    Assert.assertEquals(fsAccess.serviceHadoopConf.get("foo"), "FOO");
+    assertEquals(fsAccess.serviceHadoopConf.get("foo"), "FOO");
     server.destroy();
   }
 
@@ -174,7 +177,7 @@ public class TestFileSystemAccessService extends HFSTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
-    Assert.assertEquals(fsAccess.serviceHadoopConf.get("foo"), "BAR");
+    assertEquals(fsAccess.serviceHadoopConf.get("foo"), "BAR");
     server.destroy();
   }
 
@@ -245,15 +248,15 @@ public class TestFileSystemAccessService extends HFSTestCase {
     server.init();
     FileSystemAccess hadoop = server.get(FileSystemAccess.class);
     FileSystem fs = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
-    Assert.assertNotNull(fs);
+    assertNotNull(fs);
     fs.mkdirs(new Path("/tmp/foo"));
     hadoop.releaseFileSystem(fs);
     try {
       fs.mkdirs(new Path("/tmp/foo"));
-      Assert.fail();
+      fail();
     } catch (IOException ex) {
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
     server.destroy();
   }
@@ -288,10 +291,10 @@ public class TestFileSystemAccessService extends HFSTestCase {
     });
     try {
       fsa[0].mkdirs(new Path("/tmp/foo"));
-      Assert.fail();
+      fail();
     } catch (IOException ex) {
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
     server.destroy();
   }
@@ -351,19 +354,19 @@ public class TestFileSystemAccessService extends HFSTestCase {
           throw new IOException();
         }
       });
-      Assert.fail();
+      fail();
     } catch (FileSystemAccessException ex) {
-      Assert.assertEquals(ex.getError(), FileSystemAccessException.ERROR.H03);
+      assertEquals(ex.getError(), FileSystemAccessException.ERROR.H03);
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
 
     try {
       fsa[0].mkdirs(new Path("/tmp/foo"));
-      Assert.fail();
+      fail();
     } catch (IOException ex) {
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
     server.destroy();
   }

+ 94 - 90
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/instrumentation/TestInstrumentationService.java

@@ -18,7 +18,16 @@
 
 package org.apache.hadoop.lib.service.instrumentation;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.lib.service.Instrumentation;
@@ -32,11 +41,6 @@ import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.junit.Test;
 
-import java.io.StringWriter;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-
 public class TestInstrumentationService extends HTestCase {
 
   @Override
@@ -47,51 +51,51 @@ public class TestInstrumentationService extends HTestCase {
   @Test
   public void cron() {
     InstrumentationService.Cron cron = new InstrumentationService.Cron();
-    Assert.assertEquals(cron.start, 0);
-    Assert.assertEquals(cron.lapStart, 0);
-    Assert.assertEquals(cron.own, 0);
-    Assert.assertEquals(cron.total, 0);
+    assertEquals(cron.start, 0);
+    assertEquals(cron.lapStart, 0);
+    assertEquals(cron.own, 0);
+    assertEquals(cron.total, 0);
     long begin = Time.now();
-    Assert.assertEquals(cron.start(), cron);
-    Assert.assertEquals(cron.start(), cron);
-    Assert.assertEquals(cron.start, begin, 20);
-    Assert.assertEquals(cron.start, cron.lapStart);
+    assertEquals(cron.start(), cron);
+    assertEquals(cron.start(), cron);
+    assertEquals(cron.start, begin, 20);
+    assertEquals(cron.start, cron.lapStart);
     sleep(100);
-    Assert.assertEquals(cron.stop(), cron);
+    assertEquals(cron.stop(), cron);
     long end = Time.now();
     long delta = end - begin;
-    Assert.assertEquals(cron.own, delta, 20);
-    Assert.assertEquals(cron.total, 0);
-    Assert.assertEquals(cron.lapStart, 0);
+    assertEquals(cron.own, delta, 20);
+    assertEquals(cron.total, 0);
+    assertEquals(cron.lapStart, 0);
     sleep(100);
     long reStart = Time.now();
     cron.start();
-    Assert.assertEquals(cron.start, begin, 20);
-    Assert.assertEquals(cron.lapStart, reStart, 20);
+    assertEquals(cron.start, begin, 20);
+    assertEquals(cron.lapStart, reStart, 20);
     sleep(100);
     cron.stop();
     long reEnd = Time.now();
     delta += reEnd - reStart;
-    Assert.assertEquals(cron.own, delta, 20);
-    Assert.assertEquals(cron.total, 0);
-    Assert.assertEquals(cron.lapStart, 0);
+    assertEquals(cron.own, delta, 20);
+    assertEquals(cron.total, 0);
+    assertEquals(cron.lapStart, 0);
     cron.end();
-    Assert.assertEquals(cron.total, reEnd - begin, 20);
+    assertEquals(cron.total, reEnd - begin, 20);
 
     try {
       cron.start();
-      Assert.fail();
+      fail();
     } catch (IllegalStateException ex) {
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
 
     try {
       cron.stop();
-      Assert.fail();
+      fail();
     } catch (IllegalStateException ex) {
     } catch (Exception ex) {
-      Assert.fail();
+      fail();
     }
   }
 
@@ -135,10 +139,10 @@ public class TestInstrumentationService extends HTestCase {
 
     timer.addCron(cron);
     long[] values = timer.getValues();
-    Assert.assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
+    assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
+    assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
+    assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
+    assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
 
     cron = new InstrumentationService.Cron();
 
@@ -168,10 +172,10 @@ public class TestInstrumentationService extends HTestCase {
 
     timer.addCron(cron);
     values = timer.getValues();
-    Assert.assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
+    assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
+    assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
+    assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
+    assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
 
     avgTotal = totalDelta;
     avgOwn = ownDelta;
@@ -205,27 +209,27 @@ public class TestInstrumentationService extends HTestCase {
     cron.stop();
     timer.addCron(cron);
     values = timer.getValues();
-    Assert.assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
-    Assert.assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
+    assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
+    assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
+    assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
+    assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
 
     JSONObject json = (JSONObject) new JSONParser().parse(timer.toJSONString());
-    Assert.assertEquals(json.size(), 4);
-    Assert.assertEquals(json.get("lastTotal"), values[InstrumentationService.Timer.LAST_TOTAL]);
-    Assert.assertEquals(json.get("lastOwn"), values[InstrumentationService.Timer.LAST_OWN]);
-    Assert.assertEquals(json.get("avgTotal"), values[InstrumentationService.Timer.AVG_TOTAL]);
-    Assert.assertEquals(json.get("avgOwn"), values[InstrumentationService.Timer.AVG_OWN]);
+    assertEquals(json.size(), 4);
+    assertEquals(json.get("lastTotal"), values[InstrumentationService.Timer.LAST_TOTAL]);
+    assertEquals(json.get("lastOwn"), values[InstrumentationService.Timer.LAST_OWN]);
+    assertEquals(json.get("avgTotal"), values[InstrumentationService.Timer.AVG_TOTAL]);
+    assertEquals(json.get("avgOwn"), values[InstrumentationService.Timer.AVG_OWN]);
 
     StringWriter writer = new StringWriter();
     timer.writeJSONString(writer);
     writer.close();
     json = (JSONObject) new JSONParser().parse(writer.toString());
-    Assert.assertEquals(json.size(), 4);
-    Assert.assertEquals(json.get("lastTotal"), values[InstrumentationService.Timer.LAST_TOTAL]);
-    Assert.assertEquals(json.get("lastOwn"), values[InstrumentationService.Timer.LAST_OWN]);
-    Assert.assertEquals(json.get("avgTotal"), values[InstrumentationService.Timer.AVG_TOTAL]);
-    Assert.assertEquals(json.get("avgOwn"), values[InstrumentationService.Timer.AVG_OWN]);
+    assertEquals(json.size(), 4);
+    assertEquals(json.get("lastTotal"), values[InstrumentationService.Timer.LAST_TOTAL]);
+    assertEquals(json.get("lastOwn"), values[InstrumentationService.Timer.LAST_OWN]);
+    assertEquals(json.get("avgTotal"), values[InstrumentationService.Timer.AVG_TOTAL]);
+    assertEquals(json.get("avgOwn"), values[InstrumentationService.Timer.AVG_OWN]);
   }
 
   @Test
@@ -240,34 +244,34 @@ public class TestInstrumentationService extends HTestCase {
 
     InstrumentationService.Sampler sampler = new InstrumentationService.Sampler();
     sampler.init(4, var);
-    Assert.assertEquals(sampler.getRate(), 0f, 0.0001);
+    assertEquals(sampler.getRate(), 0f, 0.0001);
     sampler.sample();
-    Assert.assertEquals(sampler.getRate(), 0f, 0.0001);
+    assertEquals(sampler.getRate(), 0f, 0.0001);
     value[0] = 1;
     sampler.sample();
-    Assert.assertEquals(sampler.getRate(), (0d + 1) / 2, 0.0001);
+    assertEquals(sampler.getRate(), (0d + 1) / 2, 0.0001);
     value[0] = 2;
     sampler.sample();
-    Assert.assertEquals(sampler.getRate(), (0d + 1 + 2) / 3, 0.0001);
+    assertEquals(sampler.getRate(), (0d + 1 + 2) / 3, 0.0001);
     value[0] = 3;
     sampler.sample();
-    Assert.assertEquals(sampler.getRate(), (0d + 1 + 2 + 3) / 4, 0.0001);
+    assertEquals(sampler.getRate(), (0d + 1 + 2 + 3) / 4, 0.0001);
     value[0] = 4;
     sampler.sample();
-    Assert.assertEquals(sampler.getRate(), (4d + 1 + 2 + 3) / 4, 0.0001);
+    assertEquals(sampler.getRate(), (4d + 1 + 2 + 3) / 4, 0.0001);
 
     JSONObject json = (JSONObject) new JSONParser().parse(sampler.toJSONString());
-    Assert.assertEquals(json.size(), 2);
-    Assert.assertEquals(json.get("sampler"), sampler.getRate());
-    Assert.assertEquals(json.get("size"), 4L);
+    assertEquals(json.size(), 2);
+    assertEquals(json.get("sampler"), sampler.getRate());
+    assertEquals(json.get("size"), 4L);
 
     StringWriter writer = new StringWriter();
     sampler.writeJSONString(writer);
     writer.close();
     json = (JSONObject) new JSONParser().parse(writer.toString());
-    Assert.assertEquals(json.size(), 2);
-    Assert.assertEquals(json.get("sampler"), sampler.getRate());
-    Assert.assertEquals(json.get("size"), 4L);
+    assertEquals(json.size(), 2);
+    assertEquals(json.get("sampler"), sampler.getRate());
+    assertEquals(json.get("size"), 4L);
   }
 
   @Test
@@ -283,15 +287,15 @@ public class TestInstrumentationService extends HTestCase {
     };
 
     JSONObject json = (JSONObject) new JSONParser().parse(variableHolder.toJSONString());
-    Assert.assertEquals(json.size(), 1);
-    Assert.assertEquals(json.get("value"), "foo");
+    assertEquals(json.size(), 1);
+    assertEquals(json.get("value"), "foo");
 
     StringWriter writer = new StringWriter();
     variableHolder.writeJSONString(writer);
     writer.close();
     json = (JSONObject) new JSONParser().parse(writer.toString());
-    Assert.assertEquals(json.size(), 1);
-    Assert.assertEquals(json.get("value"), "foo");
+    assertEquals(json.size(), 1);
+    assertEquals(json.get("value"), "foo");
   }
 
   @Test
@@ -306,7 +310,7 @@ public class TestInstrumentationService extends HTestCase {
     server.init();
 
     Instrumentation instrumentation = server.get(Instrumentation.class);
-    Assert.assertNotNull(instrumentation);
+    assertNotNull(instrumentation);
     instrumentation.incr("g", "c", 1);
     instrumentation.incr("g", "c", 2);
     instrumentation.incr("g", "c1", 2);
@@ -339,27 +343,27 @@ public class TestInstrumentationService extends HTestCase {
     instrumentation.addSampler("g", "s", 10, varToSample);
 
     Map<String, ?> snapshot = instrumentation.getSnapshot();
-    Assert.assertNotNull(snapshot.get("os-env"));
-    Assert.assertNotNull(snapshot.get("sys-props"));
-    Assert.assertNotNull(snapshot.get("jvm"));
-    Assert.assertNotNull(snapshot.get("counters"));
-    Assert.assertNotNull(snapshot.get("timers"));
-    Assert.assertNotNull(snapshot.get("variables"));
-    Assert.assertNotNull(snapshot.get("samplers"));
-    Assert.assertNotNull(((Map<String, String>) snapshot.get("os-env")).get("PATH"));
-    Assert.assertNotNull(((Map<String, String>) snapshot.get("sys-props")).get("java.version"));
-    Assert.assertNotNull(((Map<String, ?>) snapshot.get("jvm")).get("free.memory"));
-    Assert.assertNotNull(((Map<String, ?>) snapshot.get("jvm")).get("max.memory"));
-    Assert.assertNotNull(((Map<String, ?>) snapshot.get("jvm")).get("total.memory"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("counters")).get("g"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("timers")).get("g"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("variables")).get("g"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("samplers")).get("g"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("counters")).get("g").get("c"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("counters")).get("g").get("c1"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("timers")).get("g").get("t"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("variables")).get("g").get("v"));
-    Assert.assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("samplers")).get("g").get("s"));
+    assertNotNull(snapshot.get("os-env"));
+    assertNotNull(snapshot.get("sys-props"));
+    assertNotNull(snapshot.get("jvm"));
+    assertNotNull(snapshot.get("counters"));
+    assertNotNull(snapshot.get("timers"));
+    assertNotNull(snapshot.get("variables"));
+    assertNotNull(snapshot.get("samplers"));
+    assertNotNull(((Map<String, String>) snapshot.get("os-env")).get("PATH"));
+    assertNotNull(((Map<String, String>) snapshot.get("sys-props")).get("java.version"));
+    assertNotNull(((Map<String, ?>) snapshot.get("jvm")).get("free.memory"));
+    assertNotNull(((Map<String, ?>) snapshot.get("jvm")).get("max.memory"));
+    assertNotNull(((Map<String, ?>) snapshot.get("jvm")).get("total.memory"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("counters")).get("g"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("timers")).get("g"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("variables")).get("g"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("samplers")).get("g"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("counters")).get("g").get("c"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("counters")).get("g").get("c1"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("timers")).get("g").get("t"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("variables")).get("g").get("v"));
+    assertNotNull(((Map<String, Map<String, Object>>) snapshot.get("samplers")).get("g").get("s"));
 
     StringWriter writer = new StringWriter();
     JSONObject.writeJSONString(snapshot, writer);
@@ -392,12 +396,12 @@ public class TestInstrumentationService extends HTestCase {
 
     sleep(2000);
     int i = count.get();
-    Assert.assertTrue(i > 0);
+    assertTrue(i > 0);
 
     Map<String, Map<String, ?>> snapshot = instrumentation.getSnapshot();
     Map<String, Map<String, Object>> samplers = (Map<String, Map<String, Object>>) snapshot.get("samplers");
     InstrumentationService.Sampler sampler = (InstrumentationService.Sampler) samplers.get("g").get("s");
-    Assert.assertTrue(sampler.getRate() > 0);
+    assertTrue(sampler.getRate() > 0);
 
     server.destroy();
   }
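
Note on the pattern above: the hunks in this file repeat one mechanical change. The deprecated junit.framework.Assert import is replaced by static imports from org.junit.Assert, and every qualified Assert.assertXxx(...) call loses its prefix. A minimal sketch of the resulting JUnit 4 style; ExampleTest is a hypothetical class, not part of this patch:

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class ExampleTest {
      // With the static import, assertions read as bare calls,
      // exactly as in the converted hunks above.
      @Test
      public void twoPlusTwo() {
        assertEquals(4, 2 + 2);
      }
    }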

+ 5 - 4
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/scheduler/TestSchedulerService.java

@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.lib.service.scheduler;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Arrays;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.lib.service.Scheduler;
@@ -29,8 +32,6 @@ import org.apache.hadoop.test.TestDirHelper;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 
-import java.util.Arrays;
-
 public class TestSchedulerService extends HTestCase {
 
   @Test
@@ -42,7 +43,7 @@ public class TestSchedulerService extends HTestCase {
                                                                     SchedulerService.class.getName())));
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
-    Assert.assertNotNull(server.get(Scheduler.class));
+    assertNotNull(server.get(Scheduler.class));
     server.destroy();
   }
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/DummyGroupMapping.java

@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.lib.service.security;
 
-import org.apache.hadoop.security.GroupMappingServiceProvider;
-import org.apache.hadoop.test.HadoopUsersConfTestHelper;
-
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.hadoop.security.GroupMappingServiceProvider;
+import org.apache.hadoop.test.HadoopUsersConfTestHelper;
+
 public class DummyGroupMapping implements GroupMappingServiceProvider {
 
   @Override
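
The DummyGroupMapping hunk changes nothing but import order. The converted files follow a consistent layout: static imports first, then java.* and javax.* packages, then third-party packages, with each group separated by a blank line. A compilable sketch of that layout; ImportLayoutExample and its body are hypothetical:

    import static org.junit.Assert.assertNotNull;

    import java.util.Arrays;
    import java.util.List;

    import javax.xml.namespace.QName;

    import org.junit.Test;

    public class ImportLayoutExample {
      @Test
      public void groupsAreOrdered() {
        // Each import group above is exercised once.
        List<QName> names = Arrays.asList(new QName("a"), new QName("b"));
        assertNotNull(names);
      }
    }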

+ 8 - 6
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestGroupsService.java

@@ -18,7 +18,12 @@
 
 package org.apache.hadoop.lib.service.security;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.lib.service.Groups;
@@ -28,9 +33,6 @@ import org.apache.hadoop.test.TestDirHelper;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 
-import java.util.Arrays;
-import java.util.List;
-
 public class TestGroupsService extends HTestCase {
 
   @Test
@@ -42,9 +44,9 @@ public class TestGroupsService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     Groups groups = server.get(Groups.class);
-    Assert.assertNotNull(groups);
+    assertNotNull(groups);
     List<String> g = groups.getGroups(System.getProperty("user.name"));
-    Assert.assertNotSame(g.size(), 0);
+    assertNotSame(g.size(), 0);
     server.destroy();
   }
 

+ 14 - 13
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestProxyUserService.java

@@ -18,7 +18,12 @@
 
 package org.apache.hadoop.lib.service.security;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertNotNull;
+
+import java.security.AccessControlException;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.lib.server.ServiceException;
@@ -31,10 +36,6 @@ import org.apache.hadoop.test.TestException;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 
-import java.security.AccessControlException;
-import java.util.Arrays;
-import java.util.List;
-
 public class TestProxyUserService extends HTestCase {
 
   @Test
@@ -47,7 +48,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     server.destroy();
   }
 
@@ -103,7 +104,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     proxyUser.validate("foo", "localhost", "bar");
     server.destroy();
   }
@@ -120,7 +121,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     proxyUser.validate("bar", "localhost", "foo");
     server.destroy();
   }
@@ -137,7 +138,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     proxyUser.validate("foo", "localhost", "bar");
     server.destroy();
   }
@@ -166,7 +167,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     proxyUser.validate("foo", "localhost", System.getProperty("user.name"));
     server.destroy();
   }
@@ -184,7 +185,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     proxyUser.validate("foo", "unknownhost.bar.foo", "bar");
     server.destroy();
   }
@@ -201,7 +202,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     proxyUser.validate("foo", "www.yahoo.com", "bar");
     server.destroy();
   }
@@ -218,7 +219,7 @@ public class TestProxyUserService extends HTestCase {
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     ProxyUser proxyUser = server.get(ProxyUser.class);
-    Assert.assertNotNull(proxyUser);
+    assertNotNull(proxyUser);
     proxyUser.validate("foo", "localhost", System.getProperty("user.name"));
     server.destroy();
   }

+ 13 - 10
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java

@@ -18,18 +18,21 @@
 
 package org.apache.hadoop.lib.servlet;
 
-import junit.framework.Assert;
-import org.apache.hadoop.test.HTestCase;
-import org.junit.Test;
-import org.mockito.Mockito;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
-import java.io.IOException;
-import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.hadoop.test.HTestCase;
+import org.junit.Test;
+import org.mockito.Mockito;
 
 
 public class TestHostnameFilter extends HTestCase {
@@ -47,17 +50,17 @@ public class TestHostnameFilter extends HTestCase {
       @Override
       public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse)
         throws IOException, ServletException {
-        Assert.assertTrue(HostnameFilter.get().contains("localhost"));
+        assertTrue(HostnameFilter.get().contains("localhost"));
         invoked.set(true);
       }
     };
 
     Filter filter = new HostnameFilter();
     filter.init(null);
-    Assert.assertNull(HostnameFilter.get());
+    assertNull(HostnameFilter.get());
     filter.doFilter(request, response, chain);
-    Assert.assertTrue(invoked.get());
-    Assert.assertNull(HostnameFilter.get());
+    assertTrue(invoked.get());
+    assertNull(HostnameFilter.get());
     filter.destroy();
   }
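
A detail worth noting in the test above: the assertion runs inside the mocked FilterChain, which is safe because a failing JUnit assertion throws AssertionError and propagates out of doFilter to fail the test; the invoked flag then proves the chain was actually called at all. A standalone sketch of the same pattern; CallbackAssertionExample is hypothetical:

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertTrue;

    import java.util.concurrent.atomic.AtomicBoolean;

    import org.junit.Test;

    public class CallbackAssertionExample {
      @Test
      public void assertInsideCallback() {
        final AtomicBoolean invoked = new AtomicBoolean(false);
        // Stand-in for the FilterChain: an assertion failing in here
        // throws AssertionError, which propagates and fails the test.
        Runnable chain = new Runnable() {
          @Override
          public void run() {
            assertEquals("localhost", "localhost");
            invoked.set(true);
          }
        };
        chain.run();
        // Without this guard, a callback that was never invoked would
        // let the test pass vacuously.
        assertTrue(invoked.get());
      }
    }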
 

+ 31 - 27
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestMDCFilter.java

@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.lib.servlet;
 
-import junit.framework.Assert;
-import org.apache.hadoop.test.HTestCase;
-import org.junit.Test;
-import org.mockito.Mockito;
-import org.slf4j.MDC;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.security.Principal;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -30,9 +32,11 @@ import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
-import java.io.IOException;
-import java.security.Principal;
-import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.hadoop.test.HTestCase;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.slf4j.MDC;
 
 
 public class TestMDCFilter extends HTestCase {
@@ -52,10 +56,10 @@ public class TestMDCFilter extends HTestCase {
       @Override
       public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse)
         throws IOException, ServletException {
-        Assert.assertEquals(MDC.get("hostname"), null);
-        Assert.assertEquals(MDC.get("user"), null);
-        Assert.assertEquals(MDC.get("method"), "METHOD");
-        Assert.assertEquals(MDC.get("path"), "/pathinfo");
+        assertEquals(MDC.get("hostname"), null);
+        assertEquals(MDC.get("user"), null);
+        assertEquals(MDC.get("method"), "METHOD");
+        assertEquals(MDC.get("path"), "/pathinfo");
         invoked.set(true);
       }
     };
@@ -65,11 +69,11 @@ public class TestMDCFilter extends HTestCase {
     filter.init(null);
 
     filter.doFilter(request, response, chain);
-    Assert.assertTrue(invoked.get());
-    Assert.assertNull(MDC.get("hostname"));
-    Assert.assertNull(MDC.get("user"));
-    Assert.assertNull(MDC.get("method"));
-    Assert.assertNull(MDC.get("path"));
+    assertTrue(invoked.get());
+    assertNull(MDC.get("hostname"));
+    assertNull(MDC.get("user"));
+    assertNull(MDC.get("method"));
+    assertNull(MDC.get("path"));
 
     Mockito.when(request.getUserPrincipal()).thenReturn(new Principal() {
       @Override
@@ -83,15 +87,15 @@ public class TestMDCFilter extends HTestCase {
       @Override
       public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse)
         throws IOException, ServletException {
-        Assert.assertEquals(MDC.get("hostname"), null);
-        Assert.assertEquals(MDC.get("user"), "name");
-        Assert.assertEquals(MDC.get("method"), "METHOD");
-        Assert.assertEquals(MDC.get("path"), "/pathinfo");
+        assertEquals(MDC.get("hostname"), null);
+        assertEquals(MDC.get("user"), "name");
+        assertEquals(MDC.get("method"), "METHOD");
+        assertEquals(MDC.get("path"), "/pathinfo");
         invoked.set(true);
       }
     };
     filter.doFilter(request, response, chain);
-    Assert.assertTrue(invoked.get());
+    assertTrue(invoked.get());
 
     HostnameFilter.HOSTNAME_TL.set("HOST");
 
@@ -100,15 +104,15 @@ public class TestMDCFilter extends HTestCase {
       @Override
       public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse)
         throws IOException, ServletException {
-        Assert.assertEquals(MDC.get("hostname"), "HOST");
-        Assert.assertEquals(MDC.get("user"), "name");
-        Assert.assertEquals(MDC.get("method"), "METHOD");
-        Assert.assertEquals(MDC.get("path"), "/pathinfo");
+        assertEquals(MDC.get("hostname"), "HOST");
+        assertEquals(MDC.get("user"), "name");
+        assertEquals(MDC.get("method"), "METHOD");
+        assertEquals(MDC.get("path"), "/pathinfo");
         invoked.set(true);
       }
     };
     filter.doFilter(request, response, chain);
-    Assert.assertTrue(invoked.get());
+    assertTrue(invoked.get());
 
     HostnameFilter.HOSTNAME_TL.remove();
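
Context for the assertions above: MDCFilter copies hostname, user, method, and path into SLF4J's MDC, a per-thread map that log patterns can reference, and the test checks the entries are cleared again after the chain returns. A minimal sketch of the MDC contract; MdcExample is hypothetical:

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertNull;

    import org.junit.Test;
    import org.slf4j.MDC;

    public class MdcExample {
      @Test
      public void mdcEntriesMustBeCleanedUp() {
        MDC.put("user", "name");
        assertEquals("name", MDC.get("user"));   // visible to log patterns on this thread
        MDC.remove("user");                      // filters must clean up, as asserted above
        assertNull(MDC.get("user"));
      }
    }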
 

+ 8 - 7
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java

@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.lib.servlet;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.TestDir;
@@ -35,10 +36,10 @@ public class TestServerWebApp extends HTestCase {
   @Test
   public void getHomeDir() {
     System.setProperty("TestServerWebApp0.home.dir", "/tmp");
-    Assert.assertEquals(ServerWebApp.getHomeDir("TestServerWebApp0"), "/tmp");
-    Assert.assertEquals(ServerWebApp.getDir("TestServerWebApp0", ".log.dir", "/tmp/log"), "/tmp/log");
+    assertEquals(ServerWebApp.getHomeDir("TestServerWebApp0"), "/tmp");
+    assertEquals(ServerWebApp.getDir("TestServerWebApp0", ".log.dir", "/tmp/log"), "/tmp/log");
     System.setProperty("TestServerWebApp0.log.dir", "/tmplog");
-    Assert.assertEquals(ServerWebApp.getDir("TestServerWebApp0", ".log.dir", "/tmp/log"), "/tmplog");
+    assertEquals(ServerWebApp.getDir("TestServerWebApp0", ".log.dir", "/tmp/log"), "/tmplog");
   }
 
   @Test
@@ -52,11 +53,11 @@ public class TestServerWebApp extends HTestCase {
     ServerWebApp server = new ServerWebApp("TestServerWebApp1") {
     };
 
-    Assert.assertEquals(server.getStatus(), Server.Status.UNDEF);
+    assertEquals(server.getStatus(), Server.Status.UNDEF);
     server.contextInitialized(null);
-    Assert.assertEquals(server.getStatus(), Server.Status.NORMAL);
+    assertEquals(server.getStatus(), Server.Status.NORMAL);
     server.contextDestroyed(null);
-    Assert.assertEquals(server.getStatus(), Server.Status.SHUTDOWN);
+    assertEquals(server.getStatus(), Server.Status.SHUTDOWN);
   }
 
   @Test(expected = RuntimeException.class)

+ 13 - 12
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java

@@ -19,18 +19,19 @@
 package org.apache.hadoop.lib.util;
 
 
-import junit.framework.Assert;
-import org.apache.hadoop.test.HTestCase;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
+import org.apache.hadoop.test.HTestCase;
+import org.junit.Test;
+
 public class TestCheck extends HTestCase {
 
   @Test
   public void notNullNotNull() {
-    Assert.assertEquals(Check.notNull("value", "name"), "value");
+    assertEquals(Check.notNull("value", "name"), "value");
   }
 
   @Test(expected = IllegalArgumentException.class)
@@ -79,7 +80,7 @@ public class TestCheck extends HTestCase {
 
   @Test
   public void notEmptyNotEmtpy() {
-    Assert.assertEquals(Check.notEmpty("value", "name"), "value");
+    assertEquals(Check.notEmpty("value", "name"), "value");
   }
 
   @Test(expected = IllegalArgumentException.class)
@@ -94,10 +95,10 @@ public class TestCheck extends HTestCase {
 
   @Test
   public void validIdentifierValid() throws Exception {
-    Assert.assertEquals(Check.validIdentifier("a", 1, ""), "a");
-    Assert.assertEquals(Check.validIdentifier("a1", 2, ""), "a1");
-    Assert.assertEquals(Check.validIdentifier("a_", 3, ""), "a_");
-    Assert.assertEquals(Check.validIdentifier("_", 1, ""), "_");
+    assertEquals(Check.validIdentifier("a", 1, ""), "a");
+    assertEquals(Check.validIdentifier("a1", 2, ""), "a1");
+    assertEquals(Check.validIdentifier("a_", 3, ""), "a_");
+    assertEquals(Check.validIdentifier("_", 1, ""), "_");
   }
 
   @Test(expected = IllegalArgumentException.class)
@@ -117,7 +118,7 @@ public class TestCheck extends HTestCase {
 
   @Test
   public void checkGTZeroGreater() {
-    Assert.assertEquals(Check.gt0(120, "test"), 120);
+    assertEquals(Check.gt0(120, "test"), 120);
   }
 
   @Test(expected = IllegalArgumentException.class)
@@ -132,8 +133,8 @@ public class TestCheck extends HTestCase {
 
   @Test
   public void checkGEZero() {
-    Assert.assertEquals(Check.ge0(120, "test"), 120);
-    Assert.assertEquals(Check.ge0(0, "test"), 0);
+    assertEquals(Check.ge0(120, "test"), 120);
+    assertEquals(Check.ge0(0, "test"), 0);
   }
 
   @Test(expected = IllegalArgumentException.class)

+ 29 - 27
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java

@@ -18,27 +18,29 @@
 
 package org.apache.hadoop.lib.util;
 
-import junit.framework.Assert;
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
+
 public class TestConfigurationUtils {
 
   @Test
   public void constructors() throws Exception {
     Configuration conf = new Configuration(false);
-    Assert.assertEquals(conf.size(), 0);
+    assertEquals(conf.size(), 0);
 
     byte[] bytes = "<configuration><property><name>a</name><value>A</value></property></configuration>".getBytes();
     InputStream is = new ByteArrayInputStream(bytes);
     conf = new Configuration(false);
     ConfigurationUtils.load(conf, is);
-    Assert.assertEquals(conf.size(), 1);
-    Assert.assertEquals(conf.get("a"), "A");
+    assertEquals(conf.size(), 1);
+    assertEquals(conf.get("a"), "A");
   }
 
 
@@ -62,9 +64,9 @@ public class TestConfigurationUtils {
 
     ConfigurationUtils.copy(srcConf, targetConf);
 
-    Assert.assertEquals("valueFromSource", targetConf.get("testParameter1"));
-    Assert.assertEquals("valueFromSource", targetConf.get("testParameter2"));
-    Assert.assertEquals("valueFromTarget", targetConf.get("testParameter3"));
+    assertEquals("valueFromSource", targetConf.get("testParameter1"));
+    assertEquals("valueFromSource", targetConf.get("testParameter2"));
+    assertEquals("valueFromTarget", targetConf.get("testParameter3"));
   }
 
   @Test
@@ -80,13 +82,13 @@ public class TestConfigurationUtils {
 
     ConfigurationUtils.injectDefaults(srcConf, targetConf);
 
-    Assert.assertEquals("valueFromSource", targetConf.get("testParameter1"));
-    Assert.assertEquals("originalValueFromTarget", targetConf.get("testParameter2"));
-    Assert.assertEquals("originalValueFromTarget", targetConf.get("testParameter3"));
+    assertEquals("valueFromSource", targetConf.get("testParameter1"));
+    assertEquals("originalValueFromTarget", targetConf.get("testParameter2"));
+    assertEquals("originalValueFromTarget", targetConf.get("testParameter3"));
 
-    Assert.assertEquals("valueFromSource", srcConf.get("testParameter1"));
-    Assert.assertEquals("valueFromSource", srcConf.get("testParameter2"));
-    Assert.assertNull(srcConf.get("testParameter3"));
+    assertEquals("valueFromSource", srcConf.get("testParameter1"));
+    assertEquals("valueFromSource", srcConf.get("testParameter2"));
+    assertNull(srcConf.get("testParameter3"));
   }
 
 
@@ -95,11 +97,11 @@ public class TestConfigurationUtils {
     Configuration conf = new Configuration(false);
     conf.set("a", "A");
     conf.set("b", "${a}");
-    Assert.assertEquals(conf.getRaw("a"), "A");
-    Assert.assertEquals(conf.getRaw("b"), "${a}");
+    assertEquals(conf.getRaw("a"), "A");
+    assertEquals(conf.getRaw("b"), "${a}");
     conf = ConfigurationUtils.resolve(conf);
-    Assert.assertEquals(conf.getRaw("a"), "A");
-    Assert.assertEquals(conf.getRaw("b"), "A");
+    assertEquals(conf.getRaw("a"), "A");
+    assertEquals(conf.getRaw("b"), "A");
   }
 
   @Test
@@ -110,16 +112,16 @@ public class TestConfigurationUtils {
     conf.set("b", "${a}");
     conf.set("c", "${user.name}");
     conf.set("d", "${aaa}");
-    Assert.assertEquals(conf.getRaw("a"), "A");
-    Assert.assertEquals(conf.getRaw("b"), "${a}");
-    Assert.assertEquals(conf.getRaw("c"), "${user.name}");
-    Assert.assertEquals(conf.get("a"), "A");
-    Assert.assertEquals(conf.get("b"), "A");
-    Assert.assertEquals(conf.get("c"), userName);
-    Assert.assertEquals(conf.get("d"), "${aaa}");
+    assertEquals(conf.getRaw("a"), "A");
+    assertEquals(conf.getRaw("b"), "${a}");
+    assertEquals(conf.getRaw("c"), "${user.name}");
+    assertEquals(conf.get("a"), "A");
+    assertEquals(conf.get("b"), "A");
+    assertEquals(conf.get("c"), userName);
+    assertEquals(conf.get("d"), "${aaa}");
 
     conf.set("user.name", "foo");
-    Assert.assertEquals(conf.get("user.name"), "foo");
+    assertEquals(conf.get("user.name"), "foo");
   }
 
 }
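
The resolve tests above exercise Hadoop's variable expansion: Configuration.get() substitutes ${name} references from other keys or from Java system properties, Configuration.getRaw() returns the stored text unexpanded, and an unresolvable reference such as ${aaa} passes through untouched. A standalone sketch, assuming hadoop-common on the classpath; ConfExpansionExample is hypothetical:

    import org.apache.hadoop.conf.Configuration;

    public class ConfExpansionExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("a", "A");
        conf.set("b", "${a}");
        conf.set("c", "${user.name}");
        conf.set("d", "${aaa}");
        System.out.println(conf.getRaw("b")); // ${a}   : raw, unexpanded
        System.out.println(conf.get("b"));    // A      : expanded from key "a"
        System.out.println(conf.get("c"));    // current user, from the system property
        System.out.println(conf.get("d"));    // ${aaa} : unresolvable, left as-is
      }
    }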

+ 5 - 4
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java

@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import junit.framework.Assert;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.InputStream;
 
+import org.junit.Test;
+
 public class TestInputStreamEntity {
 
   @Test
@@ -34,14 +35,14 @@ public class TestInputStreamEntity {
     InputStreamEntity i = new InputStreamEntity(is);
     i.write(baos);
     baos.close();
-    Assert.assertEquals(new String(baos.toByteArray()), "abc");
+    assertEquals(new String(baos.toByteArray()), "abc");
 
     is = new ByteArrayInputStream("abc".getBytes());
     baos = new ByteArrayOutputStream();
     i = new InputStreamEntity(is, 1, 1);
     i.write(baos);
     baos.close();
-    Assert.assertEquals(baos.toByteArray()[0], 'b');
+    assertEquals(baos.toByteArray()[0], 'b');
   }
 
 }

+ 10 - 7
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java

@@ -18,28 +18,31 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import junit.framework.Assert;
-import org.json.simple.JSONObject;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayOutputStream;
 import java.util.Map;
 
+import org.json.simple.JSONObject;
+import org.junit.Test;
+
 public class TestJSONMapProvider {
 
   @Test
   @SuppressWarnings("unchecked")
   public void test() throws Exception {
     JSONMapProvider p = new JSONMapProvider();
-    Assert.assertTrue(p.isWriteable(Map.class, null, null, null));
-    Assert.assertFalse(p.isWriteable(this.getClass(), null, null, null));
-    Assert.assertEquals(p.getSize(null, null, null, null, null), -1);
+    assertTrue(p.isWriteable(Map.class, null, null, null));
+    assertFalse(p.isWriteable(this.getClass(), null, null, null));
+    assertEquals(p.getSize(null, null, null, null, null), -1);
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     JSONObject json = new JSONObject();
     json.put("a", "A");
     p.writeTo(json, JSONObject.class, null, null, null, null, baos);
     baos.close();
-    Assert.assertEquals(new String(baos.toByteArray()).trim(), "{\"a\":\"A\"}");
+    assertEquals(new String(baos.toByteArray()).trim(), "{\"a\":\"A\"}");
   }
 
 }

+ 10 - 7
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java

@@ -18,27 +18,30 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import junit.framework.Assert;
-import org.json.simple.JSONObject;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayOutputStream;
 
+import org.json.simple.JSONObject;
+import org.junit.Test;
+
 public class TestJSONProvider {
 
   @Test
   @SuppressWarnings("unchecked")
   public void test() throws Exception {
     JSONProvider p = new JSONProvider();
-    Assert.assertTrue(p.isWriteable(JSONObject.class, null, null, null));
-    Assert.assertFalse(p.isWriteable(this.getClass(), null, null, null));
-    Assert.assertEquals(p.getSize(null, null, null, null, null), -1);
+    assertTrue(p.isWriteable(JSONObject.class, null, null, null));
+    assertFalse(p.isWriteable(this.getClass(), null, null, null));
+    assertEquals(p.getSize(null, null, null, null, null), -1);
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     JSONObject json = new JSONObject();
     json.put("a", "A");
     p.writeTo(json, JSONObject.class, null, null, null, null, baos);
     baos.close();
-    Assert.assertEquals(new String(baos.toByteArray()).trim(), "{\"a\":\"A\"}");
+    assertEquals(new String(baos.toByteArray()).trim(), "{\"a\":\"A\"}");
   }
 
 }
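
Both provider tests assert that getSize(...) returns -1; in the JAX-RS MessageBodyWriter contract a negative value means the entity length is unknown, so the container streams the response instead of setting Content-Length up front. The serialized form they check comes from json-simple, sketched standalone below; JsonWriteExample is hypothetical:

    import java.io.ByteArrayOutputStream;
    import java.io.OutputStreamWriter;
    import java.io.Writer;

    import org.json.simple.JSONObject;

    public class JsonWriteExample {
      @SuppressWarnings("unchecked")
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Writer writer = new OutputStreamWriter(baos);
        JSONObject json = new JSONObject();
        json.put("a", "A");            // JSONObject extends HashMap, hence @SuppressWarnings
        json.writeJSONString(writer);  // same serialization the providers delegate to
        writer.flush();
        System.out.println(baos.toString().trim()); // {"a":"A"}
      }
    }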

+ 15 - 13
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestParam.java

@@ -18,41 +18,43 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import junit.framework.Assert;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 import java.util.regex.Pattern;
 
+import org.junit.Test;
+
 public class TestParam {
 
   private <T> void test(Param<T> param, String name,
                    String domain, T defaultValue, T validValue,
                    String invalidStrValue, String outOfRangeValue) throws Exception {
 
-    Assert.assertEquals(name, param.getName());
-    Assert.assertEquals(domain, param.getDomain());
-    Assert.assertEquals(defaultValue, param.value());
-    Assert.assertEquals(defaultValue, param.parseParam(""));
-    Assert.assertEquals(defaultValue, param.parseParam(null));
-    Assert.assertEquals(validValue, param.parseParam(validValue.toString()));
+    assertEquals(name, param.getName());
+    assertEquals(domain, param.getDomain());
+    assertEquals(defaultValue, param.value());
+    assertEquals(defaultValue, param.parseParam(""));
+    assertEquals(defaultValue, param.parseParam(null));
+    assertEquals(validValue, param.parseParam(validValue.toString()));
     if (invalidStrValue != null) {
       try {
         param.parseParam(invalidStrValue);
-        Assert.fail();
+        fail();
       } catch (IllegalArgumentException ex) {
         //NOP
       } catch (Exception ex) {
-        Assert.fail();
+        fail();
       }
     }
     if (outOfRangeValue != null) {
       try {
         param.parseParam(outOfRangeValue);
-        Assert.fail();
+        fail();
       } catch (IllegalArgumentException ex) {
         //NOP
       } catch (Exception ex) {
-        Assert.fail();
+        fail();
       }
     }
    }
@@ -81,7 +83,7 @@ public class TestParam {
     param = new ShortParam("S", (short) 1, 8) {
     };
 
-    Assert.assertEquals(new Short((short)01777), param.parse("01777"));
+    assertEquals(new Short((short)01777), param.parse("01777"));
   }
 
   @Test
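
TestParam keeps the inline try { ... fail(); } catch pattern instead of JUnit 4's @Test(expected = ...) attribute because its helper probes several inputs within one method, and the attribute can only describe a whole test. A sketch contrasting the two forms; ExpectedExceptionExample is hypothetical:

    import static org.junit.Assert.fail;

    import org.junit.Test;

    public class ExpectedExceptionExample {
      // Whole-method form: fine when the test makes a single failing call.
      @Test(expected = IllegalArgumentException.class)
      public void wholeMethod() {
        Integer.parseInt("oops"); // NumberFormatException extends IllegalArgumentException
      }

      // Inline form: needed when one method checks several values in turn,
      // as the TestParam helper above does.
      @Test
      public void inlineForm() {
        try {
          Integer.parseInt("oops");
          fail();
        } catch (IllegalArgumentException expected) {
          // expected
        }
      }
    }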

+ 19 - 15
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java

@@ -18,16 +18,20 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import com.sun.jersey.api.core.HttpContext;
-import com.sun.jersey.api.core.HttpRequestContext;
-import com.sun.jersey.core.spi.component.ComponentScope;
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+import java.security.Principal;
+
+import javax.ws.rs.core.MultivaluedMap;
+
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.slf4j.MDC;
 
-import javax.ws.rs.core.MultivaluedMap;
-import java.security.Principal;
+import com.sun.jersey.api.core.HttpContext;
+import com.sun.jersey.api.core.HttpRequestContext;
+import com.sun.jersey.core.spi.component.ComponentScope;
 
 public class TestUserProvider {
 
@@ -43,8 +47,8 @@ public class TestUserProvider {
     HttpContext context = Mockito.mock(HttpContext.class);
     Mockito.when(context.getRequest()).thenReturn(request);
     UserProvider up = new UserProvider();
-    Assert.assertNull(up.getValue(context));
-    Assert.assertNull(MDC.get("user"));
+    assertNull(up.getValue(context));
+    assertNull(MDC.get("user"));
   }
 
   @Test
@@ -59,8 +63,8 @@ public class TestUserProvider {
     HttpContext context = Mockito.mock(HttpContext.class);
     Mockito.when(context.getRequest()).thenReturn(request);
     UserProvider up = new UserProvider();
-    Assert.assertEquals(up.getValue(context).getName(), "foo");
-    Assert.assertEquals(MDC.get("user"), "foo");
+    assertEquals(up.getValue(context).getName(), "foo");
+    assertEquals(MDC.get("user"), "foo");
   }
 
   @Test
@@ -77,15 +81,15 @@ public class TestUserProvider {
     HttpContext context = Mockito.mock(HttpContext.class);
     Mockito.when(context.getRequest()).thenReturn(request);
     UserProvider up = new UserProvider();
-    Assert.assertEquals(up.getValue(context).getName(), "bar");
-    Assert.assertEquals(MDC.get("user"), "bar");
+    assertEquals(up.getValue(context).getName(), "bar");
+    assertEquals(MDC.get("user"), "bar");
   }
 
   @Test
   public void getters() {
     UserProvider up = new UserProvider();
-    Assert.assertEquals(up.getScope(), ComponentScope.PerRequest);
-    Assert.assertEquals(up.getInjectable(null, null, Principal.class), up);
-    Assert.assertNull(up.getInjectable(null, null, String.class));
+    assertEquals(up.getScope(), ComponentScope.PerRequest);
+    assertEquals(up.getInjectable(null, null, Principal.class), up);
+    assertNull(up.getInjectable(null, null, String.class));
   }
 }

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java

@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.test;
 
-import junit.framework.Assert;
+import static org.junit.Assert.fail;
+
+import java.text.MessageFormat;
 
 import org.apache.hadoop.util.Time;
 import org.junit.Rule;
 import org.junit.rules.MethodRule;
 
-import java.text.MessageFormat;
-
 public abstract class HTestCase {
 
   public static final String TEST_WAITFOR_RATIO_PROP = "test.waitfor.ratio";
@@ -161,7 +161,7 @@ public abstract class HTestCase {
       }
       if (!eval) {
         if (failIfTimeout) {
-          Assert.fail(MessageFormat.format("Waiting timed out after [{0}] msec", timeout));
+          fail(MessageFormat.format("Waiting timed out after [{0}] msec", timeout));
         } else {
           System.out.println(MessageFormat.format("Waiting timed out after [{0}] msec", timeout));
         }

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java

@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.test;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.UserGroupInformation;
-
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+
 /**
  * Helper to configure FileSystemAccess user/group and proxyuser
  * configuration for testing using Java System properties.

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java

@@ -17,16 +17,16 @@
  */
 package org.apache.hadoop.test;
 
-import org.junit.Test;
-import org.junit.rules.MethodRule;
-import org.junit.runners.model.FrameworkMethod;
-import org.junit.runners.model.Statement;
-
 import java.io.File;
 import java.io.IOException;
 import java.text.MessageFormat;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.junit.Test;
+import org.junit.rules.MethodRule;
+import org.junit.runners.model.FrameworkMethod;
+import org.junit.runners.model.Statement;
+
 public class TestDirHelper implements MethodRule {
 
   @Test

+ 7 - 6
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java

@@ -17,14 +17,15 @@
  */
 package org.apache.hadoop.test;
 
-import junit.framework.Assert;
+import static org.junit.Assert.fail;
+
+import java.util.regex.Pattern;
+
 import org.junit.Test;
 import org.junit.rules.MethodRule;
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.Statement;
 
-import java.util.regex.Pattern;
-
 public class TestExceptionHelper implements MethodRule {
 
   @Test
@@ -41,7 +42,7 @@ public class TestExceptionHelper implements MethodRule {
           statement.evaluate();
           if (testExceptionAnnotation != null) {
             Class<? extends Throwable> klass = testExceptionAnnotation.exception();
-            Assert.fail("Expected Exception: " + klass.getSimpleName());
+            fail("Expected Exception: " + klass.getSimpleName());
           }
         } catch (Throwable ex) {
           if (testExceptionAnnotation != null) {
@@ -50,10 +51,10 @@ public class TestExceptionHelper implements MethodRule {
               String regExp = testExceptionAnnotation.msgRegExp();
               Pattern pattern = Pattern.compile(regExp);
               if (!pattern.matcher(ex.getMessage()).find()) {
-                Assert.fail("Expected Exception Message pattern: " + regExp + " got message: " + ex.getMessage());
+                fail("Expected Exception Message pattern: " + regExp + " got message: " + ex.getMessage());
               }
             } else {
-              Assert.fail("Expected Exception: " + klass.getSimpleName() + " got: " + ex.getClass().getSimpleName());
+              fail("Expected Exception: " + klass.getSimpleName() + " got: " + ex.getClass().getSimpleName());
             }
           } else {
             throw ex;
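
TestExceptionHelper is the rule behind the custom TestException annotation used elsewhere in these tests (for example in TestProxyUserService): it fails the test unless an exception of the expected type is thrown and, when a pattern is given, its message matches msgRegExp. A hypothetical usage sketch, assuming the hadoop-httpfs test helpers are on the classpath and that HTestCase wires this rule in:

    import org.apache.hadoop.test.HTestCase;
    import org.apache.hadoop.test.TestException;
    import org.junit.Test;

    public class TestExceptionUsageExample extends HTestCase {
      @Test
      @TestException(exception = IllegalStateException.class, msgRegExp = "boom.*")
      public void failsAsExpected() {
        // The rule passes the test only if an exception of the expected
        // type is thrown and its message matches the msgRegExp pattern.
        throw new IllegalStateException("boom: thrown on purpose");
      }
    }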

+ 28 - 25
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java

@@ -18,19 +18,9 @@
 
 package org.apache.hadoop.test;
 
-import junit.framework.Assert;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.Time;
-import org.junit.Test;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
@@ -39,6 +29,19 @@ import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
 
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.Time;
+import org.junit.Test;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.servlet.Context;
+
 public class TestHFSTestCase extends HFSTestCase {
 
   @Test(expected = IllegalStateException.class)
@@ -69,7 +72,7 @@ public class TestHFSTestCase extends HFSTestCase {
   @Test
   @TestDir
   public void testDirAnnotation() throws Exception {
-    Assert.assertNotNull(TestDirHelper.getTestDir());
+    assertNotNull(TestDirHelper.getTestDir());
   }
 
   @Test
@@ -81,8 +84,8 @@ public class TestHFSTestCase extends HFSTestCase {
       }
     });
     long end = Time.now();
-    Assert.assertEquals(waited, 0, 50);
-    Assert.assertEquals(end - start - waited, 0, 50);
+    assertEquals(waited, 0, 50);
+    assertEquals(end - start - waited, 0, 50);
   }
 
   @Test
@@ -95,8 +98,8 @@ public class TestHFSTestCase extends HFSTestCase {
       }
     });
     long end = Time.now();
-    Assert.assertEquals(waited, -1);
-    Assert.assertEquals(end - start, 200, 50);
+    assertEquals(waited, -1);
+    assertEquals(end - start, 200, 50);
   }
 
   @Test
@@ -109,8 +112,8 @@ public class TestHFSTestCase extends HFSTestCase {
       }
     });
     long end = Time.now();
-    Assert.assertEquals(waited, -1);
-    Assert.assertEquals(end - start, 200 * getWaitForRatio(), 50 * getWaitForRatio());
+    assertEquals(waited, -1);
+    assertEquals(end - start, 200 * getWaitForRatio(), 50 * getWaitForRatio());
   }
 
   @Test
@@ -119,7 +122,7 @@ public class TestHFSTestCase extends HFSTestCase {
     long start = Time.now();
     sleep(100);
     long end = Time.now();
-    Assert.assertEquals(end - start, 100, 50);
+    assertEquals(end - start, 100, 50);
   }
 
   @Test
@@ -128,7 +131,7 @@ public class TestHFSTestCase extends HFSTestCase {
     long start = Time.now();
     sleep(100);
     long end = Time.now();
-    Assert.assertEquals(end - start, 100 * getWaitForRatio(), 50 * getWaitForRatio());
+    assertEquals(end - start, 100 * getWaitForRatio(), 50 * getWaitForRatio());
   }
 
   @Test
@@ -141,8 +144,8 @@ public class TestHFSTestCase extends HFSTestCase {
       os.write(new byte[]{1});
       os.close();
       InputStream is = fs.open(new Path(TestHdfsHelper.getHdfsTestDir(), "foo"));
-      Assert.assertEquals(is.read(), 1);
-      Assert.assertEquals(is.read(), -1);
+      assertEquals(is.read(), 1);
+      assertEquals(is.read(), -1);
       is.close();
     } finally {
       fs.close();
@@ -167,9 +170,9 @@ public class TestHFSTestCase extends HFSTestCase {
     server.start();
     URL url = new URL(TestJettyHelper.getJettyURL(), "/bar");
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
-    Assert.assertEquals(reader.readLine(), "foo");
+    assertEquals(reader.readLine(), "foo");
     reader.close();
   }
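
The timing checks in this file, such as assertEquals(end - start, 100, 50), use the three-argument assertEquals(double, double, double) overload from org.junit.Assert: the assertion passes when the two values differ by no more than the delta, which here absorbs scheduler jitter around sleep(). A standalone sketch; DeltaExample is hypothetical:

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class DeltaExample {
      @Test
      public void elapsedTimeWithinTolerance() throws Exception {
        long start = System.currentTimeMillis();
        Thread.sleep(100);
        long end = System.currentTimeMillis();
        // Passes when |expected - actual| <= delta; the 50 ms slack
        // tolerates imprecise sleep timing.
        assertEquals(100, end - start, 50);
      }
    }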
 

+ 23 - 21
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java

@@ -18,23 +18,25 @@
 
 package org.apache.hadoop.test;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 
-import org.apache.hadoop.util.Time;
-import org.junit.Test;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
 
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.util.Time;
+import org.junit.Test;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.servlet.Context;
+
 public class TestHTestCase extends HTestCase {
 
   @Test(expected = IllegalStateException.class)
@@ -55,7 +57,7 @@ public class TestHTestCase extends HTestCase {
   @Test
   @TestDir
   public void testDirAnnotation() throws Exception {
-    Assert.assertNotNull(TestDirHelper.getTestDir());
+    assertNotNull(TestDirHelper.getTestDir());
   }
 
   @Test
@@ -67,8 +69,8 @@ public class TestHTestCase extends HTestCase {
       }
     });
     long end = Time.now();
-    Assert.assertEquals(waited, 0, 50);
-    Assert.assertEquals(end - start - waited, 0, 50);
+    assertEquals(waited, 0, 50);
+    assertEquals(end - start - waited, 0, 50);
   }
 
   @Test
@@ -81,8 +83,8 @@ public class TestHTestCase extends HTestCase {
       }
     });
     long end = Time.now();
-    Assert.assertEquals(waited, -1);
-    Assert.assertEquals(end - start, 200, 50);
+    assertEquals(waited, -1);
+    assertEquals(end - start, 200, 50);
   }
 
   @Test
@@ -95,8 +97,8 @@ public class TestHTestCase extends HTestCase {
       }
     });
     long end = Time.now();
-    Assert.assertEquals(waited, -1);
-    Assert.assertEquals(end - start, 200 * getWaitForRatio(), 50 * getWaitForRatio());
+    assertEquals(waited, -1);
+    assertEquals(end - start, 200 * getWaitForRatio(), 50 * getWaitForRatio());
   }
 
   @Test
@@ -105,7 +107,7 @@ public class TestHTestCase extends HTestCase {
     long start = Time.now();
     sleep(100);
     long end = Time.now();
-    Assert.assertEquals(end - start, 100, 50);
+    assertEquals(end - start, 100, 50);
   }
 
   @Test
@@ -114,7 +116,7 @@ public class TestHTestCase extends HTestCase {
     long start = Time.now();
     sleep(100);
     long end = Time.now();
-    Assert.assertEquals(end - start, 100 * getWaitForRatio(), 50 * getWaitForRatio());
+    assertEquals(end - start, 100 * getWaitForRatio(), 50 * getWaitForRatio());
   }
 
   public static class MyServlet extends HttpServlet {
@@ -135,9 +137,9 @@ public class TestHTestCase extends HTestCase {
     server.start();
     URL url = new URL(TestJettyHelper.getJettyURL(), "/bar");
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
-    Assert.assertEquals(reader.readLine(), "foo");
+    assertEquals(reader.readLine(), "foo");
     reader.close();
   }
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.test;
 
+import java.io.File;
+import java.util.concurrent.atomic.AtomicInteger;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -26,9 +29,6 @@ import org.junit.Test;
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.Statement;
 
-import java.io.File;
-import java.util.concurrent.atomic.AtomicInteger;
-
 public class TestHdfsHelper extends TestDirHelper {
 
   @Test

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java

@@ -17,17 +17,17 @@
  */
 package org.apache.hadoop.test;
 
+import java.net.InetAddress;
+import java.net.MalformedURLException;
+import java.net.ServerSocket;
+import java.net.URL;
+
 import org.junit.Test;
 import org.junit.rules.MethodRule;
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.Statement;
 import org.mortbay.jetty.Server;
 
-import java.net.InetAddress;
-import java.net.MalformedURLException;
-import java.net.ServerSocket;
-import java.net.URL;
-
 public class TestJettyHelper implements MethodRule {
 
   @Test

+ 9 - 3
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/hdfs/TestRaidDfs.java

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -28,8 +31,6 @@ import java.util.Random;
 import java.util.regex.Pattern;
 import java.util.zip.CRC32;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -44,8 +45,9 @@ import org.apache.hadoop.raid.RaidNode;
 import org.apache.hadoop.raid.RaidUtils;
 import org.apache.hadoop.raid.protocol.PolicyInfo.ErasureCodeType;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
 
-public class TestRaidDfs extends TestCase {
+public class TestRaidDfs {
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
       "target/test-data")).getAbsolutePath();
   final static String LOG_DIR = "target/raidlog";
@@ -195,6 +197,7 @@ public class TestRaidDfs extends TestCase {
    * Create a file, corrupt several blocks in it and ensure that the file can be
    * read through DistributedRaidFileSystem by ReedSolomon coding.
    */
+  @Test
   public void testRaidDfsRs() throws Exception {
     LOG.info("Test testRaidDfs started.");
 
@@ -224,6 +227,7 @@ public class TestRaidDfs extends TestCase {
   /**
    * Test DistributedRaidFileSystem.readFully()
    */
+  @Test
   public void testReadFully() throws Exception {
     code = ErasureCodeType.XOR;
     stripeLength = 3;
@@ -268,6 +272,7 @@ public class TestRaidDfs extends TestCase {
    * Test that access time and mtime of a source file do not change after
    * raiding.
    */
+  @Test
   public void testAccessTime() throws Exception {
     LOG.info("Test testAccessTime started.");
 
@@ -300,6 +305,7 @@ public class TestRaidDfs extends TestCase {
    * Create a file, corrupt a block in it and ensure that the file can be
    * read through DistributedRaidFileSystem by XOR code.
    */
+  @Test
   public void testRaidDfsXor() throws Exception {
     LOG.info("Test testRaidDfs started.");
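
TestRaidDfs shows the full conversion for a JUnit 3 class: extends TestCase is dropped, org.junit.Test is imported, and every test method gains @Test, since JUnit 4 discovers tests by annotation rather than by the test- name prefix. A minimal sketch of the converted shape; ConvertedExample is hypothetical:

    import static org.junit.Assert.assertTrue;

    import org.junit.Test;

    // Formerly: public class ConvertedExample extends junit.framework.TestCase
    public class ConvertedExample {
      // Without this annotation, JUnit 4 silently skips the method.
      @Test
      public void testSomething() {
        assertTrue(1 + 1 == 2);
      }
    }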
 

+ 17 - 17
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixer.java

@@ -17,6 +17,11 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -26,32 +31,27 @@ import java.util.List;
 import java.util.Random;
 import java.util.zip.CRC32;
 
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Test;
-import static org.junit.Assert.*;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.util.JarFinder;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.RaidDFSUtil;
+import org.apache.hadoop.hdfs.TestRaidDfs;
+import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.RaidDFSUtil;
-import org.apache.hadoop.hdfs.TestRaidDfs;
-import org.apache.hadoop.raid.RaidNode;
-import org.apache.hadoop.raid.RaidUtils;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.util.JarFinder;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
 
 public class TestBlockFixer {

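The TestBlockFixer hunk above changes no test logic; it replaces the wildcard `import static org.junit.Assert.*;` with the four asserts actually used and re-sorts the imports into the layout applied throughout this commit: static imports first, then the regular imports alphabetized. A sketch of the resulting header shape (imports abbreviated, file contents hypothetical):

// Static assert imports lead the file...
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

// ...followed by the alphabetized regular imports.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.junit.Test;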
+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerBlockFixDist.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.raid;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 public class TestBlockFixerBlockFixDist extends TestBlockFixer {
   @Test

+ 7 - 7
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerDistConcurrency.java

@@ -17,19 +17,19 @@
  */
 package org.apache.hadoop.raid;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.RaidDFSUtil;
 import org.apache.hadoop.hdfs.TestRaidDfs;
-import org.apache.hadoop.raid.RaidNode;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
 public class TestBlockFixerDistConcurrency extends TestBlockFixer {
   /**

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerGeneratedBlockDist.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.raid;
 
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 public class TestBlockFixerGeneratedBlockDist extends TestBlockFixer {
   /**

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestBlockFixerParityBlockFixDist.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.raid;
 
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 public class TestBlockFixerParityBlockFixDist extends TestBlockFixer {
   @Test

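The three small `TestBlockFixer*` hunks above only delete a now-unused wildcard assert import; nothing else changes because the subclasses either declare their own `@Test` methods or inherit them. JUnit 4 runners execute `@Test` methods declared in a superclass once per concrete subclass, which is what keeps this layering working after the parent's conversion. A hedged sketch (names hypothetical):

import static org.junit.Assert.assertTrue;

import org.junit.Test;

// Stand-in for a shared base such as TestBlockFixer.
public class BaseFixerTest {
  protected boolean fixBlocks() {
    return true; // base implementation; subclasses override
  }

  @Test
  public void testFix() {
    // Re-runs for every concrete subclass, against its overridden fixBlocks().
    assertTrue(fixBlocks());
  }
}

A subclass such as `public class DistFixerTest extends BaseFixerTest { ... }` then reruns `testFix()` with its own overrides, mirroring how `TestBlockFixerBlockFixDist` reuses `TestBlockFixer`.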
+ 11 - 6
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestDirectoryTraversal.java

@@ -17,27 +17,29 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
 import java.util.LinkedList;
 import java.util.List;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.util.Time;
-
 import org.apache.hadoop.raid.protocol.PolicyInfo;
+import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
-public class TestDirectoryTraversal extends TestCase {
+public class TestDirectoryTraversal {
   final static Log LOG = LogFactory.getLog(
                             "org.apache.hadoop.raid.TestDirectoryTraversal");
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
@@ -50,6 +52,7 @@ public class TestDirectoryTraversal extends TestCase {
   /**
    * Test basic enumeration.
    */
+  @Test
   public void testEnumeration() throws IOException {
     mySetup();
 
@@ -91,6 +94,7 @@ public class TestDirectoryTraversal extends TestCase {
     }
   }
 
+  @Test
   public void testSuspension() throws IOException {
     LOG.info("Starting testSuspension");
     mySetup();
@@ -128,6 +132,7 @@ public class TestDirectoryTraversal extends TestCase {
     }
   }
 
+  @Test
   public void testFileFilter() throws IOException {
     mySetup();
 

+ 9 - 3
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestErasureCodes.java

@@ -17,19 +17,22 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
 
 import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
-import junit.framework.TestCase;
-
-public class TestErasureCodes extends TestCase {
+public class TestErasureCodes {
   final int TEST_CODES = 100;
   final int TEST_TIMES = 1000;
   final Random RAND = new Random();
 
+  @Test
   public void testEncodeDecode() {
     for (int n = 0; n < TEST_CODES; n++) {
       int stripeSize = RAND.nextInt(99) + 1; // 1, 2, 3, ... 100
@@ -67,6 +70,7 @@ public class TestErasureCodes extends TestCase {
     }
   }
 
+  @Test
   public void testRSPerformance() {
     int stripeSize = 10;
     int paritySize = 4;
@@ -131,6 +135,7 @@ public class TestErasureCodes extends TestCase {
     assertTrue("Decode failed", java.util.Arrays.equals(copy, message[0]));
   }
 
+  @Test
   public void testXorPerformance() {
     java.util.Random RAND = new java.util.Random();
     int stripeSize = 10;
@@ -171,6 +176,7 @@ public class TestErasureCodes extends TestCase {
     assertTrue("Decode failed", java.util.Arrays.equals(copy, message[0]));
   }
 
+  @Test
   public void testComputeErrorLocations() {
     for (int i = 0; i < TEST_TIMES; ++i) {
       verifyErrorLocations(10, 4, 1);

+ 13 - 3
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestGaloisField.java

@@ -17,13 +17,15 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertTrue;
+
+import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
-import java.util.HashSet;
 
-import junit.framework.TestCase;
+import org.junit.Test;
 
-public class TestGaloisField extends TestCase {
+public class TestGaloisField {
 
   final int TEST_TIMES = 10000;
   final Random RAND = new Random();
@@ -40,6 +42,7 @@ public class TestGaloisField extends TestCase {
     return result;
   }
 
+  @Test
   public void testGetInstance() {
     GaloisField gf1 = GaloisField.getInstance(256, 285);
     GaloisField gf2 = GaloisField.getInstance();
@@ -52,6 +55,7 @@ public class TestGaloisField extends TestCase {
     assertTrue(gf5 == gf6);
   }
 
+  @Test
   public void testDistributivity() {
     for (int i = 0; i < TEST_TIMES; i++) {
       int a = RAND.nextInt(GF.getFieldSize());
@@ -64,6 +68,7 @@ public class TestGaloisField extends TestCase {
     }
   }
 
+  @Test
   public void testDevision() {
     for (int i = 0; i < TEST_TIMES; i++) {
       int a = RAND.nextInt(GF.getFieldSize());
@@ -77,6 +82,7 @@ public class TestGaloisField extends TestCase {
     }
   }
 
+  @Test
   public void testPower() {
     for (int i = 0; i < TEST_TIMES; i++) {
       int a = randGF();
@@ -90,6 +96,7 @@ public class TestGaloisField extends TestCase {
     }
   }
 
+  @Test
   public void testPolynomialDistributivity() {
     final int TEST_LEN = 15;
     for (int i = 0; i < TEST_TIMES; i++) {
@@ -103,6 +110,7 @@ public class TestGaloisField extends TestCase {
     }
   }
 
+  @Test
   public void testSubstitute() {
     final int TEST_LEN = 15;
     for (int i = 0; i < TEST_TIMES; i++) {
@@ -121,6 +129,7 @@ public class TestGaloisField extends TestCase {
     }
   }
 
+  @Test
   public void testSolveVandermondeSystem() {
     final int TEST_LEN = 15;
     for (int i = 0; i < TEST_TIMES; i++) {
@@ -151,6 +160,7 @@ public class TestGaloisField extends TestCase {
     }
   }
 
+  @Test
   public void testRemainder() {
     final int TEST_LEN = 15;
     for (int i = 0; i < TEST_TIMES; i++) {

+ 12 - 5
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestHarIndexParser.java

@@ -17,25 +17,30 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStreamWriter;
 import java.io.UnsupportedEncodingException;
 import java.nio.charset.Charset;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
-public class TestHarIndexParser extends TestCase {
+public class TestHarIndexParser {
   final static Log LOG = LogFactory.getLog(TestHarIndexParser.class);
   File indexFile = null;
 
-  protected void setUp() throws FileNotFoundException, IOException {
+  @Before
+  public void setUp() throws FileNotFoundException, IOException {
     LOG.info("TestHarIndexParser.setUp()");
     indexFile = File.createTempFile("harindex", ".tmp");
     indexFile.deleteOnExit();
@@ -51,12 +56,14 @@ public class TestHarIndexParser extends TestCase {
     out.close();
   }
 
-  protected void tearDown() {
+  @After
+  public void tearDown() {
     LOG.info("TestHarIndexParser.tearDown()");
     if (indexFile != null)
       indexFile.delete();
   }
 
+  @Test
   public void testHarIndexParser()
     throws UnsupportedEncodingException, IOException {
     LOG.info("testHarIndexParser started.");

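The visibility change in TestHarIndexParser above is the important one: JUnit 3 invoked `protected setUp()`/`tearDown()` reflectively through `TestCase`, whereas JUnit 4 only runs public methods carrying `@Before`/`@After`. A minimal sketch of the migrated lifecycle, assuming a temp-file fixture like the one above (names hypothetical):

import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class TestTempFileFixture {
  private File tmp;

  @Before  // replaces JUnit 3's protected void setUp()
  public void setUp() throws IOException {
    tmp = File.createTempFile("fixture", ".tmp");
    tmp.deleteOnExit();
  }

  @After  // replaces JUnit 3's protected void tearDown()
  public void tearDown() {
    if (tmp != null) {
      tmp.delete();
    }
  }

  @Test
  public void testFixtureExists() {
    assertTrue(tmp.exists());
  }
}

One migration hazard worth flagging: if the annotation is forgotten, JUnit 4 silently skips the method instead of failing, so tests run without their fixture.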
+ 8 - 7
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidFilter.java

@@ -17,25 +17,25 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
-import java.io.IOException;
 import java.util.ArrayList;
-import java.util.List;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.raid.protocol.PolicyInfo;
 import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
-public class TestRaidFilter extends TestCase {
+public class TestRaidFilter {
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
       "target/test-data")).getAbsolutePath();
   final static Log LOG =
@@ -59,6 +59,7 @@ public class TestRaidFilter extends TestCase {
     if (dfs != null) { dfs.shutdown(); }
   }
 
+  @Test
   public void testLayeredPolicies() throws Exception {
     mySetup();
     Path src1 = new Path("/user/foo");

+ 10 - 8
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidHar.java

@@ -17,31 +17,32 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.File;
-import java.io.FileWriter;
 import java.io.FileNotFoundException;
+import java.io.FileWriter;
 import java.util.Random;
 
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.log4j.Level;
-
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.log4j.Level;
+import org.junit.Test;
 
 /**
  * If a file gets deleted, then verify that the parity file gets deleted too.
  */
-public class TestRaidHar extends TestCase {
+public class TestRaidHar {
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
      "target/test-data")).getAbsolutePath();
   final static String CONFIG_FILE = new File(TEST_DIR, 
@@ -182,6 +183,7 @@ public class TestRaidHar extends TestCase {
    * Test that parity files that do not have an associated master file
    * get deleted.
    */
+  @Test
   public void testRaidHar() throws Exception {
     LOG.info("Test testRaidHar  started.");
 

+ 15 - 10
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidNode.java

@@ -17,26 +17,26 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.File;
-import java.io.FileWriter;
 import java.io.FileNotFoundException;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 import java.util.zip.CRC32;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobContext;
@@ -45,14 +45,16 @@ import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.raid.protocol.PolicyInfo;
 import org.apache.hadoop.raid.protocol.PolicyList;
 import org.apache.hadoop.util.JarFinder;
-import org.apache.hadoop.raid.protocol.PolicyInfo.ErasureCodeType;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
 /**
   * Test the generation of parity blocks for files with different block
   * sizes. Also test that a data block can be regenerated from a raid stripe
   * using the parity block
   */
-public class TestRaidNode extends TestCase {
+public class TestRaidNode {
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
       "target/test-data")).getAbsolutePath();
   public static final String DistRaid_JAR = JarFinder.getJar(DistRaid.class);
@@ -258,6 +260,7 @@ public class TestRaidNode extends TestCase {
   /**
    * Test to run a filter
    */
+  @Test
   public void testPathFilter() throws Exception {
     LOG.info("Test testPathFilter started.");
 
@@ -513,6 +516,7 @@ public class TestRaidNode extends TestCase {
   /**
    * Test dist Raid
    */
+  @Test
   public void testDistRaid() throws Exception {
     LOG.info("Test testDistRaid started.");
     long targetReplication = 2;
@@ -664,6 +668,7 @@ public class TestRaidNode extends TestCase {
     }
   }
 
+  @Test
   public void testSuspendTraversal() throws Exception {
     LOG.info("Test testSuspendTraversal started.");
     long targetReplication = 2;

+ 16 - 23
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidPurge.java

@@ -17,48 +17,37 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
-import java.io.FileWriter;
 import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.GregorianCalendar;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Properties;
+import java.io.FileWriter;
 import java.util.Random;
-import java.util.zip.CRC32;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.log4j.Level;
-
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.raid.protocol.PolicyInfo;
-import org.apache.hadoop.raid.protocol.PolicyList;
 import org.apache.hadoop.hdfs.TestRaidDfs;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.raid.protocol.PolicyInfo;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
+import org.apache.log4j.Level;
+import org.junit.Test;
 
 /**
  * If a file gets deleted, then verify that the parity file gets deleted too.
  */
-public class TestRaidPurge extends TestCase {
+public class TestRaidPurge {
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
       "target/test-data")).getAbsolutePath();
   final static String CONFIG_FILE = new File(TEST_DIR, 
@@ -206,6 +195,7 @@ public class TestRaidPurge extends TestCase {
    * Test that parity files that do not have an associated master file
    * get deleted.
    */
+  @Test
   public void testPurge() throws Exception {
     LOG.info("Test testPurge  started.");
 
@@ -312,6 +302,7 @@ public class TestRaidPurge extends TestCase {
    * Create a file, wait for parity file to get HARed. Then modify the file,
    * wait for the HAR to get purged.
    */
+  @Test
   public void testPurgeHar() throws Exception {
     LOG.info("testPurgeHar started");
     int harDelay = 0;
@@ -381,6 +372,7 @@ public class TestRaidPurge extends TestCase {
    * Create parity file, delete original file's directory and then validate that
    * parity directory is automatically deleted.
    */
+  @Test
   public void testPurgeDirectory() throws Exception {
     long stripeLength = 5;
     long blockSize = 8192;
@@ -433,6 +425,7 @@ public class TestRaidPurge extends TestCase {
   /**
    * Test that an XOR parity file is removed when a RS parity file is detected.
    */
+  @Test
   public void testPurgePreference() throws Exception {
     createClusters(true);
     Path dir = new Path("/user/test/raidtest/");

+ 14 - 12
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidShell.java

@@ -17,34 +17,35 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Random;
 import java.util.zip.CRC32;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Time;
-import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.RaidDFSUtil;
+import org.apache.hadoop.hdfs.TestRaidDfs;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.TestRaidDfs;
-import org.apache.hadoop.hdfs.RaidDFSUtil;
-import org.apache.hadoop.raid.RaidNode;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.Test;
 
 
-public class TestRaidShell extends TestCase {
+public class TestRaidShell {
   final static Log LOG = LogFactory.getLog(
                             "org.apache.hadoop.raid.TestRaidShell");
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
@@ -65,6 +66,7 @@ public class TestRaidShell extends TestCase {
    * Create a file with three stripes, corrupt a block each in two stripes,
   * and wait for the file to be fixed.
    */
+  @Test
   public void testBlockFix() throws Exception {
     LOG.info("Test testBlockFix started.");
     long blockSize = 8192L;

+ 12 - 15
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestRaidShellFsck.java

@@ -17,34 +17,31 @@
  */
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
-import java.io.FileWriter;
 import java.io.FileNotFoundException;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Random;
 
-import org.junit.Test;
-import org.junit.After;
-import static org.junit.Assert.assertTrue;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.util.Time;
-import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.RaidDFSUtil;
+import org.apache.hadoop.hdfs.TestRaidDfs;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.TestRaidDfs;
-import org.apache.hadoop.hdfs.RaidDFSUtil;
-import org.apache.hadoop.raid.RaidNode;
-import org.apache.hadoop.raid.HarIndex;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.After;
+import org.junit.Test;
 
 
 public class TestRaidShellFsck {

+ 10 - 7
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestReedSolomonDecoder.java

@@ -18,27 +18,29 @@
 
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.RaidDFSUtil;
 import org.apache.hadoop.hdfs.TestRaidDfs;
+import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.mapred.Reporter;
+import org.junit.Test;
 
 
-public class TestReedSolomonDecoder extends TestCase {
+public class TestReedSolomonDecoder {
   final static Log LOG = LogFactory.getLog(
                             "org.apache.hadoop.raid.TestReedSolomonDecoder");
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
@@ -49,6 +51,7 @@ public class TestReedSolomonDecoder extends TestCase {
   MiniDFSCluster dfs = null;
   FileSystem fileSys = null;
 
+  @Test
   public void testDecoder() throws Exception {
     mySetup();
     int stripeSize = 10;

+ 6 - 16
hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java/org/apache/hadoop/raid/TestReedSolomonEncoder.java

@@ -18,34 +18,23 @@
 
 package org.apache.hadoop.raid;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.File;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.ClientProtocol;
-import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.protocol.Block;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.DistributedRaidFileSystem;
 import org.apache.hadoop.hdfs.TestRaidDfs;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.raid.RaidNode;
+import org.junit.Test;
 
 
-public class TestReedSolomonEncoder extends TestCase {
+public class TestReedSolomonEncoder {
   final static Log LOG = LogFactory.getLog(
                             "org.apache.hadoop.raid.TestReedSolomonEncoder");
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
@@ -57,6 +46,7 @@ public class TestReedSolomonEncoder extends TestCase {
   MiniDFSCluster dfs = null;
   FileSystem fileSys = null;
 
+  @Test
   public void testEncoder() throws Exception {
     mySetup();
     int stripeSize = 10;

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -109,6 +109,8 @@ Trunk (unreleased changes)
 
     HDFS-3630 Modify TestPersistBlocks to use both flush and hflush  (sanjay)
 
+    HDFS-3583. Convert remaining tests to Junit4. (Andrew Wang via atm)
+
   OPTIMIZATIONS
 
   BUG FIXES

+ 5 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java

@@ -17,7 +17,11 @@
  */
 package org.apache.hadoop.cli;
 
-import org.apache.hadoop.cli.util.*;
+import org.apache.hadoop.cli.util.CLICommandDFSAdmin;
+import org.apache.hadoop.cli.util.CLICommandTypes;
+import org.apache.hadoop.cli.util.CLITestCmd;
+import org.apache.hadoop.cli.util.CommandExecutor;
+import org.apache.hadoop.cli.util.FSCmdExecutor;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 
 public class CLITestCmdDFS extends CLITestCmd {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java

@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.cli;
 
+import static org.junit.Assert.assertTrue;
+
 import org.apache.hadoop.cli.util.CLICommand;
 import org.apache.hadoop.cli.util.CommandExecutor.Result;
 import org.apache.hadoop.fs.FileSystem;
@@ -27,7 +29,6 @@ import org.apache.hadoop.hdfs.HDFSPolicyProvider;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.junit.After;
-import static org.junit.Assert.assertTrue;
 import org.junit.Before;
 import org.junit.Test;
 

+ 17 - 10
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java

@@ -17,19 +17,23 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+public class TestGlobPaths {
 
-public class TestGlobPaths extends TestCase {
-  
   static class RegexPathFilter implements PathFilter {
-    
+
     private final String regex;
     public RegexPathFilter(String regex) {
       this.regex = regex;
@@ -41,15 +45,15 @@ public class TestGlobPaths extends TestCase {
     }
 
   }
-  
+
   static private MiniDFSCluster dfsCluster;
   static private FileSystem fs;
   static final private int NUM_OF_PATHS = 4;
   static final String USER_DIR = "/user/"+System.getProperty("user.name");
   private Path[] path = new Path[NUM_OF_PATHS];
-  
-  @Override
-  protected void setUp() throws Exception {
+
+  @Before
+  public void setUp() throws Exception {
     try {
       Configuration conf = new HdfsConfiguration();
       dfsCluster = new MiniDFSCluster.Builder(conf).build();
@@ -59,13 +63,14 @@ public class TestGlobPaths extends TestCase {
     }
   }
   
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     if(dfsCluster!=null) {
       dfsCluster.shutdown();
     }
   }
   
+  @Test
   public void testPathFilter() throws IOException {
     try {
       String[] files = new String[] { USER_DIR + "/a", USER_DIR + "/a/b" };
@@ -78,6 +83,7 @@ public class TestGlobPaths extends TestCase {
     }
   }
   
+  @Test
   public void testPathFilterWithFixedLastComponent() throws IOException {
     try {
       String[] files = new String[] { USER_DIR + "/a", USER_DIR + "/a/b",
@@ -91,6 +97,7 @@ public class TestGlobPaths extends TestCase {
     }
   }
   
+  @Test
   public void testGlob() throws Exception {
     //pTestEscape(); // need to wait until HADOOP-1995 is fixed
     pTestJavaRegexSpecialChars();

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java

@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.fs.FileContextTestHelper.exists;
+import static org.apache.hadoop.fs.FileContextTestHelper.getTestRootPath;
+
 import java.io.IOException;
 import java.net.URISyntaxException;
 
@@ -27,8 +30,8 @@ import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -37,8 +40,6 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import static org.apache.hadoop.fs.FileContextTestHelper.*;
-
 public class TestHDFSFileContextMainOperations extends
     FileContextMainOperationsBaseTest {
   private static MiniDFSCluster cluster;

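TestHDFSFileContextMainOperations keeps its `@BeforeClass`/`@AfterClass` imports, the class-level counterpart to `@Before`/`@After`: the hooks must be public static and run once per class, which suits expensive fixtures like a shared `MiniDFSCluster`. A hedged sketch built on the cluster calls visible in these diffs (class name hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestWithSharedCluster {
  private static MiniDFSCluster cluster;

  @BeforeClass  // once, before the first @Test in the class
  public static void startCluster() throws Exception {
    Configuration conf = new HdfsConfiguration();
    cluster = new MiniDFSCluster.Builder(conf).build();
    cluster.waitActive();
  }

  @AfterClass  // once, after the last @Test in the class
  public static void stopCluster() {
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  @Test
  public void testClusterIsUp() {
    Assert.assertTrue(cluster.isClusterUp());
  }
}

The trade-off is shared mutable state across tests; per-test `@Before` setup remains the safer default when startup cost allows.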
+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestResolveHdfsSymlink.java

@@ -28,7 +28,6 @@ import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;

+ 7 - 6
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestUrlStreamHandler.java

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -25,19 +28,15 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
-import org.apache.hadoop.fs.Path;
+import org.junit.Test;
 
 /**
  * Test of the URL stream handler factory.
  */
-public class TestUrlStreamHandler extends TestCase {
+public class TestUrlStreamHandler {
 
   /**
    * Test opening and reading from an InputStream through a hdfs:// URL.
@@ -47,6 +46,7 @@ public class TestUrlStreamHandler extends TestCase {
    * 
    * @throws IOException
    */
+  @Test
   public void testDfsUrls() throws IOException {
 
     Configuration conf = new HdfsConfiguration();
@@ -105,6 +105,7 @@ public class TestUrlStreamHandler extends TestCase {
    * @throws IOException
    * @throws URISyntaxException
    */
+  @Test
   public void testFileUrls() throws IOException, URISyntaxException {
     // URLStreamHandler is already set in JVM by testDfsUrls() 
     Configuration conf = new HdfsConfiguration();

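The comment retained at the top of testFileUrls() ("URLStreamHandler is already set in JVM by testDfsUrls()") is a reminder that `URL.setURLStreamHandlerFactory` may be called at most once per JVM, while JUnit 4 promises no particular method execution order. A hedged sketch of an order-independent guard (helper name hypothetical; the factory class is the one visible in the removed imports above):

import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

public final class UrlHandlerSetup {
  private static boolean installed = false;

  private UrlHandlerSetup() {}

  // setURLStreamHandlerFactory throws an Error on a second call,
  // so make the install idempotent instead of relying on test order.
  public static synchronized void installOnce() {
    if (!installed) {
      URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
      installed = true;
    }
  }
}

Each test can then call `UrlHandlerSetup.installOnce()` in its setup without caring which test ran first.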
+ 2 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/loadGenerator/TestLoadGenerator.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.fs.loadGenerator;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileReader;
@@ -27,9 +29,6 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-
-import static org.junit.Assert.*;
-
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;

+ 10 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/permission/TestStickyBit.java

@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.fs.permission;
 
-import java.io.IOException;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-import junit.framework.TestCase;
+import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -32,8 +35,9 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Test;
 
-public class TestStickyBit extends TestCase {
+public class TestStickyBit {
 
   static UserGroupInformation user1 = 
     UserGroupInformation.createUserForTesting("theDoctor", new String[] {"tardis"});
@@ -158,6 +162,7 @@ public class TestStickyBit extends TestCase {
     assertFalse(hdfs.getFileStatus(f).getPermission().getStickyBit());
   }
 
+  @Test
   public void testGeneralSBBehavior() throws IOException, InterruptedException {
     MiniDFSCluster cluster = null;
     try {
@@ -195,6 +200,7 @@ public class TestStickyBit extends TestCase {
    * Test that one user can't rename/move another user's file when the sticky
    * bit is set.
    */
+  @Test
   public void testMovingFiles() throws IOException, InterruptedException {
     MiniDFSCluster cluster = null;
 
@@ -243,6 +249,7 @@ public class TestStickyBit extends TestCase {
    * the sticky bit back on re-start, and that no extra sticky bits appear after
    * re-start.
    */
+  @Test
   public void testStickyBitPersistence() throws IOException {
     MiniDFSCluster cluster = null;
     try {

+ 16 - 24
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsDefaultValue.java

@@ -18,6 +18,19 @@
 package org.apache.hadoop.fs.viewfs;
 
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -25,38 +38,17 @@ import java.net.URISyntaxException;
 import javax.security.auth.login.LoginException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.FsConstants;
-import org.apache.hadoop.fs.ContentSummary;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.fs.FsServerDefaults;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SYNCONCLOSE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 /**
  * Tests for viewfs implementation of default fs level values.

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsFileStatusHdfs.java

@@ -23,6 +23,9 @@ package org.apache.hadoop.fs.viewfs;
  * Since viewfs has overlaid ViewFsFileStatus, we ran into
  * serialization problems. This test tests the fix.
  */
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -40,11 +43,9 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 public class TestViewFsFileStatusHdfs {
   

+ 9 - 10
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/AppendTestUtil.java

@@ -17,13 +17,12 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Random;
 
-import junit.framework.Assert;
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -121,16 +120,16 @@ public class AppendTestUtil {
       FSDataInputStream in = fs.open(p);
       if (in.getWrappedStream() instanceof DFSInputStream) {
         long len = ((DFSInputStream)in.getWrappedStream()).getFileLength();
-        TestCase.assertEquals(length, len);
+        assertEquals(length, len);
       } else {
-        TestCase.assertEquals(length, status.getLen());
+        assertEquals(length, status.getLen());
       }
       
       for(i++; i < length; i++) {
-        TestCase.assertEquals((byte)i, (byte)in.read());  
+        assertEquals((byte)i, (byte)in.read());  
       }
       i = -(int)length;
-      TestCase.assertEquals(-1, in.read()); //EOF  
+      assertEquals(-1, in.read()); //EOF  
       in.close();
     } catch(IOException ioe) {
       throw new IOException("p=" + p + ", length=" + length + ", i=" + i, ioe);
@@ -175,7 +174,7 @@ public class AppendTestUtil {
   private static void checkData(final byte[] actual, int from,
                                 final byte[] expected, String message) {
     for (int idx = 0; idx < actual.length; idx++) {
-      Assert.assertEquals(message+" byte "+(from+idx)+" differs. expected "+
+      assertEquals(message+" byte "+(from+idx)+" differs. expected "+
                    expected[from+idx]+" actual "+actual[idx],
                    expected[from+idx], actual[idx]);
       actual[idx] = 0;
@@ -189,7 +188,7 @@ public class AppendTestUtil {
       final FSDataOutputStream out = fs.create(p, (short)1);
       out.write(bytes);
       out.close();
-      Assert.assertEquals(bytes.length, fs.getFileStatus(p).getLen());
+      assertEquals(bytes.length, fs.getFileStatus(p).getLen());
     }
 
     for(int i = 2; i < 500; i++) {
@@ -197,7 +196,7 @@ public class AppendTestUtil {
       final FSDataOutputStream out = fs.append(p);
       out.write(bytes);
       out.close();
-      Assert.assertEquals(i*bytes.length, fs.getFileStatus(p).getLen());
+      assertEquals(i*bytes.length, fs.getFileStatus(p).getLen());
     }
   }
 }

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BenchmarkThroughput.java

@@ -36,7 +36,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-
 import org.apache.log4j.Level;
 
 /**

+ 10 - 9
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java

@@ -18,25 +18,26 @@
 
 package org.apache.hadoop.hdfs;
 
-import java.net.Socket;
-import java.net.InetSocketAddress;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.DataOutputStream;
-import java.util.Random;
-import java.util.List;
 import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.util.List;
+import java.util.Random;
 
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.net.NetUtils;
 
-import static org.junit.Assert.*;
-
 /**
  * A helper class to setup the cluster, and get to BlockReader and DataNode for a block.
  */

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java

@@ -55,7 +55,6 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSTestUtil.Builder;
 import org.apache.hadoop.hdfs.MiniDFSCluster.NameNodeInfo;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/FileAppendTest4.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs;
 
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -27,8 +29,6 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-
 /** This is a comprehensive append test that tries
  * all combinations of file length and number of appended bytes
  * In each iteration, it creates a file of len1. Then reopen

+ 5 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAbandonBlock.java

@@ -17,18 +17,20 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
-
-import static org.junit.Assert.*;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java

@@ -17,27 +17,27 @@
 */
 package org.apache.hadoop.hdfs;
 
-import java.util.ArrayList;
+import static org.junit.Assert.assertEquals;
 
-import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
+import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.junit.Test;
 
 /**
  * This test ensures that the balancer bandwidth is dynamically adjusted
  * correctly.
  */
-public class TestBalancerBandwidth extends TestCase {
+public class TestBalancerBandwidth {
   final static private Configuration conf = new Configuration();
   final static private int NUM_OF_DATANODES = 2;
   final static private int DEFAULT_BANDWIDTH = 1024*1024;
   public static final Log LOG = LogFactory.getLog(TestBalancerBandwidth.class);
 
+  @Test
   public void testBalancerBandwidth() throws Exception {
     /* Set bandwidthPerSec to a low value of 1M bps. */
     conf.setLong(

+ 8 - 9
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockMissingException.java

@@ -17,26 +17,24 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.BlockMissingException;
+import org.junit.Test;
 
-public class TestBlockMissingException extends TestCase {
+public class TestBlockMissingException {
   final static Log LOG = LogFactory.getLog("org.apache.hadoop.hdfs.TestBlockMissing");
   final static int NUM_DATANODES = 3;
 
@@ -47,6 +45,7 @@ public class TestBlockMissingException extends TestCase {
   /**
    * Test DFS Raid
    */
+  @Test
   public void testBlockMissingException() throws Exception {
     LOG.info("Test testBlockMissingException started.");
     long blockSize = 1024L;

+ 5 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java

@@ -17,24 +17,26 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.IOException;
 import java.util.ArrayList;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
+import org.junit.Test;
 
 /**
  * This class tests DatanodeDescriptor.getBlocksScheduled() at the
  * NameNode. This counter is supposed to keep track of blocks currently
  * scheduled to a datanode.
  */
-public class TestBlocksScheduledCounter extends TestCase {
+public class TestBlocksScheduledCounter {
 
+  @Test
   public void testBlocksScheduledCounter() throws IOException {
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(new HdfsConfiguration())
                                                .build();

+ 6 - 7
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientBlockVerification.java

@@ -18,21 +18,20 @@
 
 package org.apache.hadoop.hdfs;
 
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+
 import java.util.List;
 
-import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-import org.apache.hadoop.fs.Path;
 import org.apache.log4j.Level;
-
-import org.junit.Test;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.never;
+import org.junit.Test;
 
 public class TestClientBlockVerification {
 

+ 1 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientProtocolForPipelineRecovery.java

@@ -26,11 +26,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.io.IOUtils;
-
-import org.junit.Test;
 import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * This tests pipeline recovery related client protocol works correct or not.

+ 10 - 14
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestConnCache.java

@@ -17,37 +17,33 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.spy;
+
+import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
-import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSClient;
-import org.apache.hadoop.hdfs.DFSInputStream;
-import org.apache.hadoop.hdfs.SocketCache;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.io.IOUtils;
-
 import org.apache.hadoop.security.token.Token;
-import org.junit.Test;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import static org.junit.Assert.*;
-
+import org.junit.Test;
 import org.mockito.Matchers;
 import org.mockito.Mockito;
-import org.mockito.stubbing.Answer;
 import org.mockito.invocation.InvocationOnMock;
-import static org.mockito.Mockito.spy;
+import org.mockito.stubbing.Answer;
 
 /**
  * This class tests the client connection caching in a single node

+ 6 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestCrcCorruption.java

@@ -18,21 +18,23 @@
 
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.File;
-import java.io.RandomAccessFile;
 import java.io.IOException;
+import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 import java.util.Random;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.io.IOUtils;
+import org.junit.Test;
 
 /**
  * A JUnit test for corrupted file handling.

+ 12 - 7
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSAddressConfig.java

@@ -24,20 +24,25 @@
  */
 package org.apache.hadoop.hdfs;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_IPC_ADDRESS_KEY;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
+import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.junit.Test;
 
 
-public class TestDFSAddressConfig extends TestCase {
+public class TestDFSAddressConfig {
 
+  @Test
   public void testDFSAddressConfig() throws IOException {
     Configuration conf = new HdfsConfiguration();
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientExcludedNodes.java

@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.io.OutputStream;
 
-import org.junit.*;
-import static org.junit.Assert.fail;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.Test;
 
 
 /**

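TestDFSClientExcludedNodes and several neighbors keep the JUnit 3-era try/`fail()` idiom for expected errors, which works unchanged under the static import added above. JUnit 4 also offers `@Test(expected = ...)` as a terser alternative when nothing further needs to be asserted about the exception; a hedged sketch of both forms side by side (exception choice hypothetical):

import static org.junit.Assert.fail;

import java.io.IOException;

import org.junit.Test;

public class TestExpectedFailure {
  private void alwaysThrows() throws IOException {
    throw new IOException("boom");
  }

  @Test
  public void testWithTryFail() {  // the idiom this commit preserves
    try {
      alwaysThrows();
      fail("expected an IOException");
    } catch (IOException expected) {
      // the exception is the success path; optionally inspect it here
    }
  }

  @Test(expected = IOException.class)  // equivalent JUnit 4 shorthand
  public void testWithExpectedAttribute() throws IOException {
    alwaysThrows();
  }
}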
+ 16 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java

@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyLong;
 import static org.mockito.Matchers.anyString;
@@ -38,8 +42,6 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
@@ -75,6 +77,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.Level;
+import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.internal.stubbing.answers.ThrowsException;
 import org.mockito.invocation.InvocationOnMock;
@@ -86,7 +89,7 @@ import com.google.common.base.Joiner;
  * These tests make sure that DFSClient retries fetching data from DFS
  * properly in case of errors.
  */
-public class TestDFSClientRetries extends TestCase {
+public class TestDFSClientRetries {
   private static final String ADDRESS = "0.0.0.0";
   final static private int PING_INTERVAL = 1000;
   final static private int MIN_SLEEP_TIME = 1000;
@@ -146,6 +149,7 @@ public class TestDFSClientRetries extends TestCase {
   * This makes sure that when the DN closes the client's socket after the client had
    * successfully connected earlier, the data can still be fetched.
    */
+  @Test
   public void testWriteTimeoutAtDataNode() throws IOException,
                                                   InterruptedException { 
     final int writeTimeout = 100; //milliseconds.
@@ -198,6 +202,7 @@ public class TestDFSClientRetries extends TestCase {
    * of times trying to add a block
    */
   @SuppressWarnings("serial")
+  @Test
   public void testNotYetReplicatedErrors() throws IOException
   { 
     final String exceptionMsg = "Nope, not replicated yet...";
@@ -242,6 +247,7 @@ public class TestDFSClientRetries extends TestCase {
    * operation, and not over the lifetime of the stream. It is a regression
    * test for HDFS-127.
    */
+  @Test
   public void testFailuresArePerOperation() throws Exception
   {
     long fileSize = 4096;
@@ -317,6 +323,7 @@ public class TestDFSClientRetries extends TestCase {
    * a client to safely retry a call and still produce a correct
    * file. See HDFS-3031.
    */
+  @Test
   public void testIdempotentAllocateBlockAndClose() throws Exception {
     final String src = "/testIdempotentAllocateBlock";
     Path file = new Path(src);
@@ -457,6 +464,7 @@ public class TestDFSClientRetries extends TestCase {
   /**
   * Test that a DFSClient waits for a random time before retrying busy blocks.
    */
+  @Test
   public void testDFSClientRetriesOnBusyBlocks() throws IOException {
     
     System.out.println("Testing DFSClient random waiting on busy blocks.");
@@ -700,6 +708,7 @@ public class TestDFSClientRetries extends TestCase {
     public int get() { return counter; }
   }
 
+  @Test
   public void testGetFileChecksum() throws Exception {
     final String f = "/testGetFileChecksum";
     final Path p = new Path(f);
@@ -736,6 +745,7 @@ public class TestDFSClientRetries extends TestCase {
    * RPC to the server and set rpcTimeout to less than n and ensure
    * that socketTimeoutException is obtained
    */
+  @Test
   public void testClientDNProtocolTimeout() throws IOException {
     final Server server = new TestServer(1, true);
     server.start();
@@ -770,6 +780,7 @@ public class TestDFSClientRetries extends TestCase {
    * read call, so the client should expect consecutive calls to behave the same
    * way. See HDFS-3067.
    */
+  @Test
   public void testRetryOnChecksumFailure()
       throws UnresolvedLinkException, IOException {
     HdfsConfiguration conf = new HdfsConfiguration();
@@ -812,6 +823,7 @@ public class TestDFSClientRetries extends TestCase {
   }
 
   /** Test client retry with namenode restarting. */
+  @Test
   public void testNamenodeRestart() throws Exception {
     ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
 
@@ -937,6 +949,7 @@ public class TestDFSClientRetries extends TestCase {
     }
   }
 
+  @Test
   public void testMultipleLinearRandomRetry() {
     parseMultipleLinearRandomRetry(null, "");
     parseMultipleLinearRandomRetry(null, "11");

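Every test* method in TestDFSClientRetries gains an explicit @Test because JUnit4 discovery is annotation-driven: once a class stops extending TestCase, an unannotated method is silently skipped rather than run or reported. A hedged sketch of that failure mode, using a hypothetical DiscoverySketch class:

import static org.junit.Assert.fail;

import org.junit.Test;

public class DiscoverySketch {

  @Test
  public void testRuns() {
    // Executed by the JUnit4 runner.
  }

  public void testSilentlySkipped() {
    // No @Test: under JUnit4 this method never runs and no failure is
    // reported, which is why a missed annotation during conversion is easy
    // to overlook.
    fail("unreached under JUnit4");
  }
}
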
+ 10 - 5
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSFinalize.java

@@ -17,17 +17,21 @@
 */
 package org.apache.hadoop.hdfs;
 
+import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.DATA_NODE;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
 import java.io.File;
 import java.util.Collections;
 import java.util.List;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.DATA_NODE;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
+import org.junit.After;
+import org.junit.Test;
 
 import com.google.common.collect.Lists;
 
@@ -35,7 +39,7 @@ import com.google.common.collect.Lists;
  * This test ensures the appropriate response from the system when 
  * the system is finalized.
  */
-public class TestDFSFinalize extends TestCase {
+public class TestDFSFinalize {
  
   private static final Log LOG = LogFactory.getLog(
                                                    "org.apache.hadoop.hdfs.TestDFSFinalize");
@@ -86,6 +90,7 @@ public class TestDFSFinalize extends TestCase {
   /**
    * This test attempts to finalize the NameNode and DataNode.
    */
+  @Test
   public void testFinalize() throws Exception {
     UpgradeUtilities.initialize();
     
@@ -125,8 +130,8 @@ public class TestDFSFinalize extends TestCase {
     } // end numDir loop
   }
  
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     LOG.info("Shutting down MiniDFSCluster");
     if (cluster != null) cluster.shutdown();
   }

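TestDFSFinalize also shows the fixture conversion: JUnit3's "@Override protected void tearDown()" becomes a public method annotated with @After, since JUnit4 locates fixture methods by annotation and requires them to be public. A minimal sketch, with AutoCloseable standing in for MiniDFSCluster (the class name and field are hypothetical):

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class LifecycleSketch {

  private AutoCloseable cluster;   // hypothetical stand-in for MiniDFSCluster

  @Before                          // was: @Override protected void setUp()
  public void setUp() {
    cluster = () -> { };           // acquire the resource under test
  }

  @Test
  public void testSomething() {
    // Test body would exercise the cluster here.
  }

  @After                           // was: @Override protected void tearDown()
  public void tearDown() throws Exception {
    if (cluster != null) {         // same null-guard as the hunk above
      cluster.close();
    }
  }
}
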
+ 11 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java

@@ -17,21 +17,27 @@
  */
 package org.apache.hadoop.hdfs;
 
-import junit.framework.TestCase;
-import java.io.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.DataOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
 
 /**
  * This class tests that the DFS command mkdirs cannot create subdirectories
  * from a file when passed an illegal path.  HADOOP-281.
  */
-public class TestDFSMkdirs extends TestCase {
+public class TestDFSMkdirs {
 
   private void writeFile(FileSystem fileSys, Path name) throws IOException {
     DataOutputStream stm = fileSys.create(name);
@@ -43,6 +49,7 @@ public class TestDFSMkdirs extends TestCase {
    * Tests mkdirs can create a directory that does not exist and will
    * not create a subdirectory off a file.
    */
+  @Test
   public void testDFSMkdirs() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -80,6 +87,7 @@ public class TestDFSMkdirs extends TestCase {
   /**
    * Tests mkdir will not create directory when parent is missing.
    */
+  @Test
   public void testMkdir() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();

+ 15 - 8
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSPermission.java

@@ -17,14 +17,15 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
-import junit.framework.AssertionFailedError;
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -35,13 +36,15 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.server.common.Util;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Time;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 /** Unit tests for permission */
-public class TestDFSPermission extends TestCase {
+public class TestDFSPermission {
   public static final Log LOG = LogFactory.getLog(TestDFSPermission.class);
   final private static Configuration conf = new HdfsConfiguration();
   
@@ -106,13 +109,13 @@ public class TestDFSPermission extends TestCase {
     }
   }
 
-  @Override
+  @Before
   public void setUp() throws IOException {
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
     cluster.waitActive();
   }
   
-  @Override
+  @After
   public void tearDown() throws IOException {
     if (cluster != null) {
       cluster.shutdown();
@@ -122,6 +125,7 @@ public class TestDFSPermission extends TestCase {
   /** This tests if permission setting in create, mkdir, and 
    * setPermission works correctly
    */
+  @Test
   public void testPermissionSetting() throws Exception {
     testPermissionSetting(OpType.CREATE); // test file creation
     testPermissionSetting(OpType.MKDIRS); // test directory creation
@@ -257,6 +261,7 @@ public class TestDFSPermission extends TestCase {
    * check that ImmutableFsPermission can be used as the argument
    * to setPermission
    */
+  @Test
   public void testImmutableFsPermission() throws IOException {
     fs = FileSystem.get(conf);
 
@@ -266,6 +271,7 @@ public class TestDFSPermission extends TestCase {
   }
   
   /* check if the ownership of a file/directory is set correctly */
+  @Test
   public void testOwnership() throws Exception {
     testOwnership(OpType.CREATE); // test file creation
     testOwnership(OpType.MKDIRS); // test directory creation
@@ -354,6 +360,7 @@ public class TestDFSPermission extends TestCase {
 
   /* Check if namenode performs permission checking correctly for
    * superuser, file owner, group owner, and other users */
+  @Test
   public void testPermissionChecking() throws Exception {
     try {
       fs = FileSystem.get(conf);
@@ -533,7 +540,7 @@ public class TestDFSPermission extends TestCase {
         } catch(AccessControlException e) {
           assertTrue(expectPermissionDeny());
         }
-      } catch (AssertionFailedError ae) {
+      } catch (AssertionError ae) {
         logPermissions();
         throw ae;
       }

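The TestDFSPermission hunk swaps junit.framework.AssertionFailedError for java.lang.AssertionError because JUnit4's org.junit.Assert methods throw the latter; a catch block written against the JUnit3 type would no longer intercept failures. A minimal sketch of the log-and-rethrow pattern used above (CatchSketch and isExpectedState are hypothetical):

import static org.junit.Assert.assertTrue;

import org.junit.Test;

public class CatchSketch {

  @Test
  public void testLogsBeforeRethrow() {
    try {
      assertTrue(isExpectedState());
    } catch (AssertionError ae) {  // was: catch (AssertionFailedError ae)
      // Log diagnostic state first, then rethrow so the failure still
      // surfaces to the runner.
      System.err.println("assertion failed; dumping state for debugging");
      throw ae;
    }
  }

  private boolean isExpectedState() {
    return true;                   // hypothetical predicate for illustration
  }
}
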
+ 5 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRemove.java

@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hdfs;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -26,8 +28,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
+import org.junit.Test;
 
-public class TestDFSRemove extends junit.framework.TestCase {
+public class TestDFSRemove {
   final Path dir = new Path("/test/remove/");
 
   void list(FileSystem fs, String name) throws IOException {
@@ -51,6 +54,7 @@ public class TestDFSRemove extends junit.framework.TestCase {
     return total;
   }
   
+  @Test
   public void testRemove() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();

+ 6 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRename.java

@@ -16,6 +16,9 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hdfs;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -25,8 +28,9 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
+import org.junit.Test;
 
-public class TestDFSRename extends junit.framework.TestCase {
+public class TestDFSRename {
   static int countLease(MiniDFSCluster cluster) {
     return NameNodeAdapter.getLeaseManager(cluster.getNamesystem()).countLease();
   }
@@ -46,6 +50,7 @@ public class TestDFSRename extends junit.framework.TestCase {
     a_out.close();
   }
   
+  @Test
   public void testRename() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();

+ 10 - 6
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java

@@ -19,22 +19,25 @@ package org.apache.hadoop.hdfs;
 
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.DATA_NODE;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.NAME_NODE;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.After;
+import org.junit.Test;
 
 import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
@@ -44,7 +47,7 @@ import com.google.common.collect.Lists;
 * the system when the system is rolled back under various storage state and
 * version conditions.
 */
-public class TestDFSRollback extends TestCase {
+public class TestDFSRollback {
  
   private static final Log LOG = LogFactory.getLog(
                                                    "org.apache.hadoop.hdfs.TestDFSRollback");
@@ -131,6 +134,7 @@ public class TestDFSRollback extends TestCase {
    * This test attempts to rollback the NameNode and DataNode under
    * a number of valid and invalid conditions.
    */
+  @Test
   public void testRollback() throws Exception {
     File[] baseDirs;
     UpgradeUtilities.initialize();
@@ -299,8 +303,8 @@ public class TestDFSRollback extends TestCase {
     }
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     LOG.info("Shutting down MiniDFSCluster");
     if (cluster != null) cluster.shutdown();
   }

+ 22 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java

@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.File;
@@ -33,8 +37,6 @@ import java.util.Random;
 import java.util.Scanner;
 import java.util.zip.GZIPOutputStream;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -53,11 +55,12 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
+import org.junit.Test;
 
 /**
  * This class tests commands from DFSShell.
  */
-public class TestDFSShell extends TestCase {
+public class TestDFSShell {
   private static final Log LOG = LogFactory.getLog(TestDFSShell.class);
   
   static final String TEST_ROOT_DIR =
@@ -94,6 +97,7 @@ public class TestDFSShell extends TestCase {
     System.out.println(Thread.currentThread().getStackTrace()[2] + " " + s);
   }
 
+  @Test
   public void testZeroSizeFile() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -136,6 +140,7 @@ public class TestDFSShell extends TestCase {
     }
   }
   
+  @Test
   public void testRecrusiveRm() throws IOException {
 	  Configuration conf = new HdfsConfiguration();
 	  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -161,6 +166,7 @@ public class TestDFSShell extends TestCase {
     }
   }
     
+  @Test
   public void testDu() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -210,6 +216,7 @@ public class TestDFSShell extends TestCase {
     }
                                   
   }
+  @Test
   public void testPut() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -308,6 +315,7 @@ public class TestDFSShell extends TestCase {
 
 
   /** check command error outputs and exit statuses. */
+  @Test
   public void testErrOutPut() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = null;
@@ -448,6 +456,7 @@ public class TestDFSShell extends TestCase {
     }
   }
   
+  @Test
   public void testURIPaths() throws Exception {
     Configuration srcConf = new HdfsConfiguration();
     Configuration dstConf = new HdfsConfiguration();
@@ -540,6 +549,7 @@ public class TestDFSShell extends TestCase {
     }
   }
 
+  @Test
   public void testText() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = null;
@@ -614,6 +624,7 @@ public class TestDFSShell extends TestCase {
     }
   }
 
+  @Test
   public void testCopyToLocal() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -711,6 +722,7 @@ public class TestDFSShell extends TestCase {
     return path;
   }
 
+  @Test
   public void testCount() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -877,6 +889,7 @@ public class TestDFSShell extends TestCase {
     }
   }
   
+  @Test
   public void testFilePermissions() throws IOException {
     Configuration conf = new HdfsConfiguration();
     
@@ -942,6 +955,7 @@ public class TestDFSShell extends TestCase {
   /**
    * Tests various options of DFSShell.
    */
+  @Test
   public void testDFSShell() throws IOException {
     Configuration conf = new HdfsConfiguration();
     /* This tests some properties of ChecksumFileSystem as well.
@@ -1209,6 +1223,7 @@ public class TestDFSShell extends TestCase {
     String run(int exitcode, String... options) throws IOException;
   }
 
+  @Test
   public void testRemoteException() throws Exception {
     UserGroupInformation tmpUGI = 
       UserGroupInformation.createUserForTesting("tmpname", new String[] {"mygroup"});
@@ -1252,6 +1267,7 @@ public class TestDFSShell extends TestCase {
     }
   }
   
+  @Test
   public void testGet() throws IOException {
     DFSTestUtil.setLogLevel2All(FSInputChecker.LOG);
     final Configuration conf = new HdfsConfiguration();
@@ -1312,6 +1328,7 @@ public class TestDFSShell extends TestCase {
     }
   }
 
+  @Test
   public void testLsr() throws Exception {
     final Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -1369,6 +1386,7 @@ public class TestDFSShell extends TestCase {
    * and return -1 exit code.
    * @throws Exception
    */
+  @Test
   public void testInvalidShell() throws Exception {
     Configuration conf = new Configuration(); // default FS (non-DFS)
     DFSAdmin admin = new DFSAdmin();
@@ -1378,6 +1396,7 @@ public class TestDFSShell extends TestCase {
   }
 
   // force Copy Option is -f
+  @Test
   public void testCopyCommandsWithForceOption() throws Exception {
     Configuration conf = new Configuration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)

+ 5 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShellGenericOptions.java

@@ -17,22 +17,24 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.PrintWriter;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.util.ToolRunner;
+import org.junit.Test;
 
-public class TestDFSShellGenericOptions extends TestCase {
+public class TestDFSShellGenericOptions {
 
+  @Test
   public void testDFSCommand() throws IOException {
     String namenode = null;
     MiniDFSCluster cluster = null;

+ 10 - 7
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStartupVersions.java

@@ -19,25 +19,27 @@ package org.apache.hadoop.hdfs;
 
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.DATA_NODE;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.NAME_NODE;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.File;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
-import org.apache.hadoop.hdfs.server.common.Storage;
-import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.common.Storage;
+import org.apache.hadoop.hdfs.server.common.StorageInfo;
+import org.junit.After;
+import org.junit.Test;
 
 /**
  * This test ensures the appropriate response (successful or failure) from 
  * a Datanode when the system is started with differing version combinations. 
  */
-public class TestDFSStartupVersions extends TestCase {
+public class TestDFSStartupVersions {
   
   private static final Log LOG = LogFactory.getLog(
                                                    "org.apache.hadoop.hdfs.TestDFSStartupVersions");
@@ -235,6 +237,7 @@ public class TestDFSStartupVersions extends TestCase {
    *         this iterations version 3-tuple
    * </pre>
    */
+  @Test
   public void testVersions() throws Exception {
     UpgradeUtilities.initialize();
     Configuration conf = UpgradeUtilities.initializeStorageStateConf(1, 
@@ -276,8 +279,8 @@ public class TestDFSStartupVersions extends TestCase {
     }
   }
   
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     LOG.info("Shutting down MiniDFSCluster");
     if (cluster != null) cluster.shutdown();
   }

+ 21 - 11
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStorageStateRecovery.java

@@ -17,25 +17,32 @@
 */
 package org.apache.hadoop.hdfs;
 
+import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.DATA_NODE;
+import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.NAME_NODE;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
-import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
-
-import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.NAME_NODE;
-import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.DATA_NODE;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
 * This test ensures the appropriate response (successful or failure) from
 * the system when the system is started under various storage state and
 * version conditions.
 */
-public class TestDFSStorageStateRecovery extends TestCase {
+public class TestDFSStorageStateRecovery {
  
   private static final Log LOG = LogFactory.getLog(
                                                    "org.apache.hadoop.hdfs.TestDFSStorageStateRecovery");
@@ -311,6 +318,7 @@ public class TestDFSStorageStateRecovery extends TestCase {
    * This test iterates over the testCases table and attempts
    * to startup the NameNode normally.
    */
+  @Test
   public void testNNStorageStates() throws Exception {
     String[] baseDirs;
 
@@ -354,6 +362,7 @@ public class TestDFSStorageStateRecovery extends TestCase {
    * This test iterates over the testCases table for Datanode storage and
    * attempts to startup the DataNode normally.
    */
+  @Test
   public void testDNStorageStates() throws Exception {
     String[] baseDirs;
 
@@ -394,6 +403,7 @@ public class TestDFSStorageStateRecovery extends TestCase {
    * This test iterates over the testCases table for block pool storage and
    * attempts to startup the DataNode normally.
    */
+  @Test
   public void testBlockPoolStorageStates() throws Exception {
     String[] baseDirs;
 
@@ -431,15 +441,15 @@ public class TestDFSStorageStateRecovery extends TestCase {
     } // end numDirs loop
   }
 
-  @Override
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     LOG.info("Setting up the directory structures.");
     UpgradeUtilities.initialize();
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     LOG.info("Shutting down MiniDFSCluster");
     if (cluster != null) cluster.shutdown();
   }
-}
+}

+ 8 - 7
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java

@@ -19,6 +19,13 @@ package org.apache.hadoop.hdfs;
 
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.DATA_NODE;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType.NAME_NODE;
+import static org.apache.hadoop.hdfs.server.namenode.NNStorage.getImageFileName;
+import static org.apache.hadoop.hdfs.server.namenode.NNStorage.getInProgressEditsFileName;
+import static org.apache.hadoop.test.GenericTestUtils.assertExists;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.IOException;
@@ -27,14 +34,10 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
-import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.namenode.TestParallelImageWrite;
-import static org.apache.hadoop.hdfs.server.namenode.NNStorage.getInProgressEditsFileName;
-import static org.apache.hadoop.hdfs.server.namenode.NNStorage.getImageFileName;
-
-import static org.apache.hadoop.test.GenericTestUtils.assertExists;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -43,8 +46,6 @@ import org.junit.Test;
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 
-import static org.junit.Assert.*;
-
 /**
 * This test ensures the appropriate response (successful or failure) from
 * the system when the system is upgraded under various storage state and

+ 16 - 5
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java

@@ -18,13 +18,22 @@
 
 package org.apache.hadoop.hdfs;
 
-import junit.framework.TestCase;
-import java.io.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.TreeMap;
 import java.util.zip.CRC32;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -34,8 +43,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
 
 /**
  * This tests data transfer protocol handling in the Datanode. It sends
@@ -46,7 +54,7 @@ import org.apache.commons.logging.LogFactory;
  *   2) hadoop-dfs-dir.txt : checksums that are compared in this test.
  * Please read hadoop-dfs-dir.txt for more information.  
  */
-public class TestDFSUpgradeFromImage extends TestCase {
+public class TestDFSUpgradeFromImage {
   
   private static final Log LOG = LogFactory
       .getLog(TestDFSUpgradeFromImage.class);
@@ -182,6 +190,7 @@ public class TestDFSUpgradeFromImage extends TestCase {
    * Test that sets up a fake image from Hadoop 0.3.0 and tries to start a
    * NN, verifying that the correct error message is thrown.
    */
+  @Test
   public void testFailOnPreUpgradeImage() throws IOException {
     Configuration conf = new HdfsConfiguration();
 
@@ -225,6 +234,7 @@ public class TestDFSUpgradeFromImage extends TestCase {
   /**
    * Test upgrade from 0.22 image
    */
+  @Test
   public void testUpgradeFromRel22Image() throws IOException {
     unpackStorage(HADOOP22_IMAGE);
     upgradeAndVerify();
@@ -234,6 +244,7 @@ public class TestDFSUpgradeFromImage extends TestCase {
    * Test upgrade from 0.22 image with corrupt md5, make sure it
    * fails to upgrade
    */
+  @Test
   public void testUpgradeFromCorruptRel22Image() throws IOException {
     unpackStorage(HADOOP22_IMAGE);
     

+ 24 - 12
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java

@@ -18,10 +18,22 @@
 
 package org.apache.hadoop.hdfs;
 
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICES;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICE_ID;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
@@ -34,18 +46,18 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-
-import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
+import org.junit.Before;
+import org.junit.Test;
 
 public class TestDFSUtil {
   

+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java

@@ -20,7 +20,10 @@ package org.apache.hadoop.hdfs;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.InputStream;
 import java.io.PrintWriter;

+ 11 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java

@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.RandomAccessFile;
@@ -28,8 +32,6 @@ import java.util.concurrent.TimeoutException;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -40,11 +42,12 @@ import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Time;
+import org.junit.Test;
 
 /**
  * This test verifies that block verification occurs on the datanode
  */
-public class TestDatanodeBlockScanner extends TestCase {
+public class TestDatanodeBlockScanner {
   
   private static final Log LOG = 
                  LogFactory.getLog(TestDatanodeBlockScanner.class);
@@ -118,6 +121,7 @@ public class TestDatanodeBlockScanner extends TestCase {
     return verificationTime;
   }
 
+  @Test
   public void testDatanodeBlockScanner() throws IOException, TimeoutException {
     long startTime = Time.now();
     
@@ -168,6 +172,7 @@ public class TestDatanodeBlockScanner extends TestCase {
     return MiniDFSCluster.corruptReplica(replica, blk);
   }
 
+  @Test
   public void testBlockCorruptionPolicy() throws IOException {
     Configuration conf = new HdfsConfiguration();
     conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000L);
@@ -232,12 +237,14 @@ public class TestDatanodeBlockScanner extends TestCase {
    * 4. Test again waits until the block is reported with expected number
    *    of good replicas.
    */
+  @Test
   public void testBlockCorruptionRecoveryPolicy1() throws Exception {
     // Test recovery of 1 corrupt replica
     LOG.info("Testing corrupt replica recovery for one corrupt replica");
     blockCorruptionRecoveryPolicy(4, (short)3, 1);
   }
 
+  @Test
   public void testBlockCorruptionRecoveryPolicy2() throws Exception {
     // Test recovery of 2 corrupt replicas
     LOG.info("Testing corrupt replica recovery for two corrupt replicas");
@@ -302,6 +309,7 @@ public class TestDatanodeBlockScanner extends TestCase {
   }
   
   /** Test if NameNode handles truncated blocks in block report */
+  @Test
   public void testTruncatedBlockReport() throws Exception {
     final Configuration conf = new HdfsConfiguration();
     final short REPLICATION_FACTOR = (short)2;

+ 10 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeDeath.java

@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.hdfs;
 
-import java.io.IOException;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
-import junit.framework.TestCase;
+import java.io.IOException;
 
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
@@ -31,17 +32,18 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.log4j.Level;
+import org.junit.Test;
 
 /**
  * This class tests that pipelines survive data node death and recovery.
  */
-public class TestDatanodeDeath extends TestCase {
+public class TestDatanodeDeath {
   {
     ((Log4JLogger)NameNode.stateChangeLog).getLogger().setLevel(Level.ALL);
     ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
@@ -410,11 +412,15 @@ public class TestDatanodeDeath extends TestCase {
     }
   }
 
+  @Test
   public void testSimple0() throws IOException {simpleTest(0);}
 
+  @Test
   public void testSimple1() throws IOException {simpleTest(1);}
 
+  @Test
   public void testSimple2() throws IOException {simpleTest(2);}
 
+  @Test
   public void testComplex() throws IOException {complexTest();}
 }

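TestDatanodeDeath keeps three near-identical one-line methods (testSimple0 through testSimple2) rather than folding them together. For contrast only, JUnit4's Parameterized runner can express the same fan-out; this patch does not use it, and ParamSketch below is hypothetical:

import java.util.Arrays;
import java.util.Collection;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class ParamSketch {

  @Parameters
  public static Collection<Object[]> data() {
    // One array per run; each element becomes a constructor argument.
    return Arrays.asList(new Object[][] { { 0 }, { 1 }, { 2 } });
  }

  private final int datanodeIndex;

  public ParamSketch(int datanodeIndex) {
    this.datanodeIndex = datanodeIndex;
  }

  @Test
  public void testSimple() {
    // Stands in for simpleTest(datanodeIndex) in the hunk above.
    System.out.println("running with datanode index " + datanodeIndex);
  }
}
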
+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeRegistration.java

@@ -19,7 +19,8 @@ package org.apache.hadoop.hdfs;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
 
 import java.net.InetSocketAddress;
 

+ 7 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java

@@ -17,27 +17,30 @@
 */
 package org.apache.hadoop.hdfs;
 
+import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.junit.Assert.assertEquals;
+
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
-
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import static org.apache.hadoop.test.MetricsAsserts.*;
+import org.junit.Test;
 
 /**
 * This test ensures that all types of data node report work correctly.
  */
-public class TestDatanodeReport extends TestCase {
+public class TestDatanodeReport {
   final static private Configuration conf = new HdfsConfiguration();
   final static private int NUM_OF_DATANODES = 4;
     
   /**
   * This test exercises the different types of datanode report.
    */
+  @Test
   public void testDatanodeReport() throws Exception {
     conf.setInt(
         DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500); // 0.5s

+ 7 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDefaultNameNodePort.java

@@ -17,18 +17,20 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+
 import java.net.InetSocketAddress;
 import java.net.URI;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.junit.Test;
 
 /** Test NameNode port defaulting code. */
-public class TestDefaultNameNodePort extends TestCase {
+public class TestDefaultNameNodePort {
 
+  @Test
   public void testGetAddressFromString() throws Exception {
     assertEquals(NameNode.getAddress("foo").getPort(),
                  NameNode.DEFAULT_PORT);
@@ -40,6 +42,7 @@ public class TestDefaultNameNodePort extends TestCase {
                  555);
   }
 
+  @Test
   public void testGetAddressFromConf() throws Exception {
     Configuration conf = new HdfsConfiguration();
     FileSystem.setDefaultUri(conf, "hdfs://foo/");
@@ -50,6 +53,7 @@ public class TestDefaultNameNodePort extends TestCase {
     assertEquals(NameNode.getAddress(conf).getPort(), NameNode.DEFAULT_PORT);
   }
 
+  @Test
   public void testGetUri() {
     assertEquals(NameNode.getUri(new InetSocketAddress("foo", 555)),
                  URI.create("hdfs://foo:555"));

+ 5 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDeprecatedKeys.java

@@ -18,13 +18,15 @@
 
 package org.apache.hadoop.hdfs;
 
-import org.apache.hadoop.hdfs.DFSConfigKeys;
+import static org.junit.Assert.assertTrue;
+
 import org.apache.hadoop.conf.Configuration;
-import junit.framework.TestCase;
+import org.junit.Test;
 
-public class TestDeprecatedKeys extends TestCase {
+public class TestDeprecatedKeys {
  
   //Tests a deprecated key
+  @Test
   public void testDeprecatedKeys() throws Exception {
     Configuration conf = new HdfsConfiguration();
     conf.set("topology.script.file.name", "xyz");

+ 7 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSInputChecker.java

@@ -17,14 +17,16 @@
  */
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -35,11 +37,12 @@ import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
+import org.junit.Test;
 
 /**
  * This class tests if FSInputChecker works correctly.
  */
-public class TestFSInputChecker extends TestCase {
+public class TestFSInputChecker {
   static final long seed = 0xDEADBEEFL;
   static final int BYTES_PER_SUM = 10;
   static final int BLOCK_SIZE = 2*BYTES_PER_SUM;
@@ -291,6 +294,7 @@ public class TestFSInputChecker extends TestCase {
     in.close();    
   }
   
+  @Test
   public void testFSInputChecker() throws Exception {
     Configuration conf = new HdfsConfiguration();
     conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);

+ 9 - 5
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java

@@ -17,21 +17,24 @@
  */
 package org.apache.hadoop.hdfs;
 
-import junit.framework.TestCase;
-import java.io.*;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
 import java.util.Random;
-import org.apache.hadoop.conf.Configuration;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.Test;
 
 /**
  * This class tests if FSOutputSummer works correctly.
  */
-public class TestFSOutputSummer extends TestCase {
+public class TestFSOutputSummer {
   private static final long seed = 0xDEADBEEFL;
   private static final int BYTES_PER_CHECKSUM = 10;
   private static final int BLOCK_SIZE = 2*BYTES_PER_CHECKSUM;
@@ -111,6 +114,7 @@ public class TestFSOutputSummer extends TestCase {
   /**
   * Test write operation for output stream in DFS.
    */
+  @Test
   public void testFSOutputSummer() throws Exception {
     Configuration conf = new HdfsConfiguration();
     conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);

Not all files can be shown because too many files changed in this diff