
Revert "MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote."

This reverts commit 2c268cc9365851f5b02d967d13c8c0cbca850a86.
Akira Ajisaka 9 years ago
parent
commit
115be193df
80 changed files with 575 additions and 840 deletions
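The reverted change had migrated these tests from JUnit 3 to JUnit 4; applying this revert moves them back: test classes extend junit.framework.TestCase again, and the org.junit.Test annotation plus the static org.junit.Assert imports are dropped, as the diffs below show. A minimal sketch of the two styles (hypothetical ExampleTest class, not part of this commit):

// JUnit 3 style, restored by this revert: the class inherits from
// TestCase, and every public void method whose name starts with "test"
// is discovered and run; assertions are inherited instance methods.
import junit.framework.TestCase;

public class ExampleTest extends TestCase {
  public void testAddition() {
    assertEquals(4, 2 + 2);
  }
}

// JUnit 4 style, removed by this revert: a plain class whose test
// methods are marked with @Test, with statically imported assertions.
//
// import org.junit.Test;
// import static org.junit.Assert.assertEquals;
//
// public class ExampleTest {
//   @Test
//   public void testAddition() {
//     assertEquals(4, 2 + 2);
//   }
// }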
  1. + 4 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
  2. + 4 - 14
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
  3. + 3 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
  4. + 4 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
  5. + 7 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
  6. + 3 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java
  7. + 1 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
  8. + 0 - 10
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
  9. + 7 - 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
  10. + 4 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
  11. + 2 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
  12. + 8 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
  13. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
  14. + 6 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
  15. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
  16. + 4 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
  17. + 0 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
  18. + 4 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
  19. + 6 - 9
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
  20. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
  21. + 12 - 20
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
  22. + 9 - 19
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
  23. + 4 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
  24. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
  25. + 2 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
  26. + 8 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
  27. + 11 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
  28. + 3 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
  29. + 5 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
  30. + 11 - 12
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
  31. + 4 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
  32. + 27 - 19
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
  33. + 7 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java
  34. + 6 - 13
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java
  35. + 9 - 22
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
  36. + 15 - 18
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java
  37. + 15 - 17
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
  38. + 15 - 17
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
  39. + 8 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
  40. + 10 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
  41. + 3 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
  42. + 11 - 13
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
  43. + 8 - 12
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
  44. + 3 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
  45. + 19 - 23
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
  46. + 7 - 17
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
  47. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
  48. + 4 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
  49. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
  50. + 2 - 0
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
  51. + 3 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
  52. + 8 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
  53. + 3 - 6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
  54. + 14 - 20
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
  55. + 22 - 27
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
  56. + 4 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
  57. + 3 - 5
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
  58. + 3 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
  59. + 14 - 9
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
  60. + 7 - 10
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java
  61. + 5 - 10
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java
  62. + 4 - 14
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java
  63. + 11 - 9
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
  64. + 7 - 14
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
  65. + 11 - 16
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
  66. + 18 - 21
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
  67. + 13 - 21
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
  68. + 22 - 30
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
  69. + 21 - 23
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
  70. + 6 - 18
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
  71. + 4 - 13
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
  72. + 11 - 24
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
  73. + 5 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
  74. + 2 - 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
  75. + 2 - 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
  76. + 3 - 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
  77. + 5 - 10
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
  78. + 13 - 17
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
  79. + 7 - 11
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
  80. + 1 - 8
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java

@@ -28,6 +28,8 @@ import java.io.PrintStream;
 import java.util.Date;
 import java.util.StringTokenizer;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -37,9 +39,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
-import org.junit.Test;
 
- /**
+/**
  * Distributed i/o benchmark.
  * <p>
  * This test writes into or reads from a specified number of files.
@@ -67,7 +68,7 @@ import org.junit.Test;
  * </ul>
  */
 @Ignore
-public class DFSCIOTest {
+public class DFSCIOTest extends TestCase {
   // Constants
   private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
   private static final int TEST_TYPE_READ = 0;
@@ -97,7 +98,6 @@ public class DFSCIOTest {
    * 
    * @throws Exception
    */
-  @Test
   public void testIOs() throws Exception {
     testIOs(10, 10);
   }

+ 4 - 14
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java

@@ -34,6 +34,8 @@ import java.util.HashMap;
 import java.net.InetSocketAddress;
 import java.net.URI;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -48,15 +50,8 @@ import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
-
 
-public class TestFileSystem {
+public class TestFileSystem extends TestCase {
   private static final Log LOG = FileSystem.LOG;
 
   private static Configuration conf = new Configuration();
@@ -71,7 +66,6 @@ public class TestFileSystem {
   private static Path READ_DIR = new Path(ROOT, "fs_read");
   private static Path DATA_DIR = new Path(ROOT, "fs_data");
 
-  @Test
   public void testFs() throws Exception {
     testFs(10 * MEGA, 100, 0);
   }
@@ -96,7 +90,6 @@ public class TestFileSystem {
     fs.delete(READ_DIR, true);
   }
 
-  @Test
   public static void testCommandFormat() throws Exception {
     // This should go to TestFsShell.java when it is added.
     CommandFormat cf;
@@ -495,7 +488,6 @@ public class TestFileSystem {
     }
   }
 
-  @Test
   public void testFsCache() throws Exception {
     {
       long now = System.currentTimeMillis();
@@ -569,7 +561,6 @@ public class TestFileSystem {
         + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
   }
 
-  @Test
   public void testFsClose() throws Exception {
     {
       Configuration conf = new Configuration();
@@ -578,7 +569,6 @@ public class TestFileSystem {
     }
   }
 
-  @Test
   public void testFsShutdownHook() throws Exception {
     final Set<FileSystem> closed = Collections.synchronizedSet(new HashSet<FileSystem>());
     Configuration conf = new Configuration();
@@ -610,7 +600,7 @@ public class TestFileSystem {
     assertTrue(closed.contains(fsWithoutAuto));
   }
 
-  @Test
+
   public void testCacheKeysAreCaseInsensitive()
     throws Exception
   {

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java

@@ -23,18 +23,19 @@ import java.io.FileOutputStream;
 import java.io.OutputStreamWriter;
 import java.io.File;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Test;
 
 /**
  * Test Job History Log Analyzer.
  *
  * @see JHLogAnalyzer
  */
-public class TestJHLA {
+public class TestJHLA extends TestCase {
   private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
   private String historyLog = System.getProperty("test.build.data", 
                                   "build/test/data") + "/history/test.log";
@@ -132,7 +133,6 @@ public class TestJHLA {
   /**
    * Run log analyzer in test mode for file test.log.
    */
-  @Test
   public void testJHLA() {
     String[] args = {"-test", historyLog, "-jobDelimiter", ".!!FILE=.*!!"};
     JHLogAnalyzer.main(args);

+ 4 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java

@@ -32,25 +32,21 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.mapred.*;
 
+import junit.framework.TestCase;
 import org.apache.commons.logging.*;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
-public class TestSequenceFileMergeProgress {
+public class TestSequenceFileMergeProgress extends TestCase {
   private static final Log LOG = FileInputFormat.LOG;
   private static final int RECORDS = 10000;
-
-  @Test
+  
   public void testMergeProgressWithNoCompression() throws IOException {
     runTest(SequenceFile.CompressionType.NONE);
   }
 
-  @Test
   public void testMergeProgressWithRecordCompression() throws IOException {
     runTest(SequenceFile.CompressionType.RECORD);
   }
 
-  @Test
   public void testMergeProgressWithBlockCompression() throws IOException {
     runTest(SequenceFile.CompressionType.BLOCK);
   }
@@ -96,7 +92,7 @@ public class TestSequenceFileMergeProgress {
       count++;
     }
     assertEquals(RECORDS, count);
-    assertEquals(1.0f, rIter.getProgress().get(), 0.0000);
+    assertEquals(1.0f, rIter.getProgress().get());
   }
 
 }

+ 7 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java

@@ -17,11 +17,10 @@
  */
 package org.apache.hadoop.mapred;
 
+import junit.framework.TestCase;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.junit.After;
-import org.junit.Before;
 
 import java.io.IOException;
 import java.util.Map;
@@ -42,7 +41,7 @@ import java.util.Properties;
  * <p/>
  * The DFS filesystem is formated before the testcase starts and after it ends.
  */
-public abstract class ClusterMapReduceTestCase {
+public abstract class ClusterMapReduceTestCase extends TestCase {
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
 
@@ -51,8 +50,9 @@ public abstract class ClusterMapReduceTestCase {
    *
    * @throws Exception
    */
-  @Before
-  public void setUp() throws Exception {
+  protected void setUp() throws Exception {
+    super.setUp();
+
     startCluster(true, null);
   }
 
@@ -139,9 +139,9 @@ public abstract class ClusterMapReduceTestCase {
    *
    * @throws Exception
    */
-  @After
-  public void tearDown() throws Exception {
+  protected void tearDown() throws Exception {
     stopCluster();
+    super.tearDown();
   }
 
   /**
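The hunks above also restore the JUnit 3 lifecycle: instead of public methods annotated with @Before and @After, the class overrides TestCase's protected setUp() and tearDown() hooks and chains to super. A minimal sketch of the restored pattern (hypothetical subclass, not from this commit):

import junit.framework.TestCase;

public abstract class ExampleClusterTestCase extends TestCase {
  @Override
  protected void setUp() throws Exception {
    super.setUp();      // let TestCase initialize first
    // start the mini clusters here
  }

  @Override
  protected void tearDown() throws Exception {
    // stop the mini clusters here
    super.tearDown();   // then let TestCase clean up
  }
}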

+ 3 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java

@@ -28,13 +28,13 @@ import org.apache.hadoop.ipc.TestRPC.TestImpl;
 import org.apache.hadoop.ipc.TestRPC.TestProtocol;
 import org.apache.hadoop.mapred.AuditLogger.Keys;
 import org.apache.hadoop.net.NetUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+
+import junit.framework.TestCase;
 
 /**
  * Tests {@link AuditLogger}.
  */
-public class TestAuditLogger {
+public class TestAuditLogger extends TestCase {
   private static final String USER = "test";
   private static final String OPERATION = "oper";
   private static final String TARGET = "tgt";
@@ -44,7 +44,6 @@ public class TestAuditLogger {
   /**
    * Test the AuditLog format with key-val pair.
    */
-  @Test
   public void testKeyValLogFormat() {
     StringBuilder actLog = new StringBuilder();
     StringBuilder expLog = new StringBuilder();
@@ -115,7 +114,6 @@ public class TestAuditLogger {
   /**
    * Test {@link AuditLogger} without IP set.
    */
-  @Test
   public void testAuditLoggerWithoutIP() throws Exception {
     // test without ip
     testSuccessLogFormat(false);
@@ -139,7 +137,6 @@ public class TestAuditLogger {
   /**
    * Test {@link AuditLogger} with IP set.
    */
-  @Test
   public void testAuditLoggerWithIP() throws Exception {
     Configuration conf = new Configuration();
     // start the IPC server

+ 1 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java

@@ -40,11 +40,6 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.junit.Ignore;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
 @Ignore
 public class TestBadRecords extends ClusterMapReduceTestCase {
   
@@ -211,8 +206,7 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
     }
     return processed;
   }
-
-  @Test
+  
   public void testBadMapRed() throws Exception {
     JobConf conf = createJobConf();
     conf.setMapperClass(BadMapper.class);

+ 0 - 10
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java

@@ -29,12 +29,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertFalse;
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
   public void _testMapReduce(boolean restart) throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
@@ -91,17 +85,14 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
 
   }
 
-  @Test
   public void testMapReduce() throws Exception {
     _testMapReduce(false);
   }
 
-  @Test
   public void testMapReduceRestarting() throws Exception {
     _testMapReduce(true);
   }
 
-  @Test
   public void testDFSRestart() throws Exception {
     Path file = new Path(getInputDir(), "text.txt");
     OutputStream os = getFileSystem().create(file);
@@ -118,7 +109,6 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
     
   }
 
-  @Test
   public void testMRConfig() throws Exception {
     JobConf conf = createJobConf();
     assertNull(conf.get("xyz"));

+ 7 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java

@@ -21,15 +21,15 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.Test;
 
+import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 
 /** 
  * TestCollect checks if the collect can handle simultaneous invocations.
  */
-public class TestCollect
+public class TestCollect extends TestCase 
 {
   final static Path OUTPUT_DIR = new Path("build/test/test.collect.output");
   static final int NUM_FEEDERS = 10;
@@ -127,7 +127,7 @@ public class TestCollect
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(1);
   }
-  @Test
+  
   public void testCollect() throws IOException {
     JobConf conf = new JobConf();
     configure(conf);
@@ -144,5 +144,9 @@ public class TestCollect
       fs.delete(OUTPUT_DIR, true);
     }
   }
+  
+  public static void main(String[] args) throws IOException {
+    new TestCollect().testCollect();
+  }
 }
 
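The revert also brings back a main() entry point here: because a JUnit 3 test inherits its assertions from TestCase rather than relying on a runner-managed annotation lifecycle, the test method can simply be invoked on a fresh instance, so the class runs standalone:

// restored standalone entry point, as in the hunk above
public static void main(String[] args) throws IOException {
  new TestCollect().testCollect();
}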

+ 4 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java

@@ -21,29 +21,28 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
 
 /**
  * check for the job submission  options of 
  * -libjars -files -archives
  */
 @Ignore
-public class TestCommandLineJobSubmission {
-  // Input output paths for this..
+public class TestCommandLineJobSubmission extends TestCase {
+  // Input output paths for this.. 
   // these are all dummy and does not test
   // much in map reduce except for the command line
   // params 
   static final Path input = new Path("/test/input/");
   static final Path output = new Path("/test/output");
   File buildDir = new File(System.getProperty("test.build.data", "/tmp"));
-  @Test
   public void testJobShell() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

+ 2 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java

@@ -23,12 +23,11 @@ import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper;
 import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
+import junit.framework.TestCase;
 import java.text.NumberFormat;
 
-public class TestFieldSelection {
+public class TestFieldSelection extends TestCase {
 
private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -36,7 +35,6 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     idFormat.setGroupingUsed(false);
   }
 
-  @Test
   public void testFieldSelection() throws Exception {
     launch();
   }

+ 8 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java

@@ -17,14 +17,12 @@
  */
 package org.apache.hadoop.mapred;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
 import java.io.Writer;
@@ -32,7 +30,7 @@ import java.io.OutputStreamWriter;
 import java.util.Set;
 import java.util.HashSet;
 
-public class TestFileInputFormatPathFilter {
+public class TestFileInputFormatPathFilter extends TestCase {
 
   public static class DummyFileInputFormat extends FileInputFormat {
 
@@ -57,12 +55,12 @@ public class TestFileInputFormatPathFilter {
       new Path(new Path(System.getProperty("test.build.data", "."), "data"),
           "TestFileInputFormatPathFilter");
 
-  @Before
+
   public void setUp() throws Exception {
     tearDown();
     localFs.mkdirs(workDir);
   }
-  @After
+
   public void tearDown() throws Exception {
     if (localFs.exists(workDir)) {
       localFs.delete(workDir, true);
@@ -131,19 +129,18 @@ public class TestFileInputFormatPathFilter {
     assertEquals(createdFiles, computedFiles);
   }
 
-  @Test
   public void testWithoutPathFilterWithoutGlob() throws Exception {
     _testInputFiles(false, false);
   }
-  @Test
+
   public void testWithoutPathFilterWithGlob() throws Exception {
     _testInputFiles(false, true);
   }
-  @Test
+
   public void testWithPathFilterWithoutGlob() throws Exception {
     _testInputFiles(true, false);
   }
-  @Test
+
   public void testWithPathFilterWithGlob() throws Exception {
     _testInputFiles(true, true);
   }

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java

@@ -20,11 +20,10 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.net.NetworkTopology;
 
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
+
+public class TestGetSplitHosts extends TestCase {
 
-public class TestGetSplitHosts {
-  @Test
   public void testGetSplitHosts() throws Exception {
 
     int numBlocks = 3;

+ 6 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java

@@ -21,12 +21,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
-import org.junit.Test;
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertEquals;
 
-public class TestIFileStreams {
-  @Test
+import junit.framework.TestCase;
+
+public class TestIFileStreams extends TestCase {
+
   public void testIFileStream() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -43,7 +42,7 @@ public class TestIFileStreams {
     }
     ifis.close();
   }
-  @Test
+
   public void testBadIFileStream() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -74,7 +73,7 @@ public class TestIFileStreams {
     }
     fail("Did not detect bad data in checksum");
   }
-  @Test
+
   public void testBadLength() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java

@@ -17,15 +17,14 @@
  */
 package org.apache.hadoop.mapred;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
-public class TestInputPath {
-  @Test
+public class TestInputPath extends TestCase {
   public void testInputPath() throws Exception {
     JobConf jobConf = new JobConf();
     Path workingDir = jobConf.getWorkingDirectory();

+ 4 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java

@@ -26,6 +26,8 @@ import java.io.Writer;
 import java.util.Iterator;
 import java.util.StringTokenizer;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -34,11 +36,8 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
 
-public class TestJavaSerialization {
+public class TestJavaSerialization extends TestCase {
 
   private static String TEST_ROOT_DIR =
     new File(System.getProperty("test.build.data", "/tmp")).toURI()
@@ -91,7 +90,7 @@ public class TestJavaSerialization {
     wr.write("b a\n");
     wr.close();
   }
-  @Test
+  
   public void testMapReduceJob() throws Exception {
 
     JobConf conf = new JobConf(TestJavaSerialization.class);
@@ -150,7 +149,6 @@ public class TestJavaSerialization {
    * coupled to Writable types, if so, the job will fail.
    *
    */
-  @Test
   public void testWriteToSequencefile() throws Exception {
     JobConf conf = new JobConf(TestJavaSerialization.class);
     conf.setJobName("JavaSerialization");

+ 0 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java

@@ -29,13 +29,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-
 public class TestJobName extends ClusterMapReduceTestCase {
 
-  @Test
   public void testComplexName() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
         "text.txt"));
@@ -70,7 +65,6 @@ public class TestJobName extends ClusterMapReduceTestCase {
     reader.close();
   }
 
-  @Test
   public void testComplexNameWithRegex() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
         "text.txt"));

+ 4 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java

@@ -21,6 +21,8 @@ package org.apache.hadoop.mapred;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -30,15 +32,11 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
 
 /**
  * A JUnit test to test Job System Directory with Mini-DFS.
  */
-public class TestJobSysDirWithDFS {
+public class TestJobSysDirWithDFS extends TestCase {
   private static final Log LOG =
     LogFactory.getLog(TestJobSysDirWithDFS.class.getName());
   
@@ -117,7 +115,7 @@ public class TestJobSysDirWithDFS {
     //  between Job Client & Job Tracker
     assertTrue(result.job.isSuccessful());
   }
-  @Test
+
   public void testWithDFS() throws IOException {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

+ 6 - 9
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.util.*;
+import junit.framework.TestCase;
 
 import org.apache.commons.logging.*;
 import org.apache.hadoop.fs.*;
@@ -27,11 +28,8 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 
-public class TestKeyValueTextInputFormat {
+public class TestKeyValueTextInputFormat extends TestCase {
   private static final Log LOG =
     LogFactory.getLog(TestKeyValueTextInputFormat.class.getName());
 
@@ -49,7 +47,7 @@ public class TestKeyValueTextInputFormat {
   private static Path workDir = 
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestKeyValueTextInputFormat");
-  @Test
+  
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     Path file = new Path(workDir, "test.txt");
@@ -136,7 +134,7 @@ public class TestKeyValueTextInputFormat {
                                            (str.getBytes("UTF-8")), 
                                            defaultConf);
   }
-  @Test
+  
   public void testUTF8() throws Exception {
     LineReader in = null;
 
@@ -155,7 +153,7 @@ public class TestKeyValueTextInputFormat {
       }
     }
   }
-  @Test
+
   public void testNewLines() throws Exception {
     LineReader in = null;
     try {
@@ -221,8 +219,7 @@ public class TestKeyValueTextInputFormat {
   /**
    * Test using the gzip codec for reading
    */
-  @Test
-  public void testGzip() throws IOException {
+  public static void testGzip() throws IOException {
     JobConf job = new JobConf();
     CompressionCodec gzip = new GzipCodec();
     ReflectionUtils.setConf(gzip, job);

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java

@@ -35,15 +35,14 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.lib.LazyOutputFormat;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
 
 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part
  * files only if there is an explicit output.collect. This helps in preventing
  * 0 byte files
  */
-public class TestLazyOutput {
+public class TestLazyOutput extends TestCase {
   private static final int NUM_HADOOP_SLAVES = 3;
   private static final int NUM_MAPS_PER_NODE = 2;
   private static final Path INPUT = new Path("/testlazy/input");
@@ -133,7 +132,7 @@ public class TestLazyOutput {
     }
   }
 
-  @Test
+
   public void testLazyOutput() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

+ 12 - 20
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java

@@ -17,6 +17,16 @@
  */
 package org.apache.hadoop.mapred;
 
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -26,21 +36,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
-import org.junit.After;
-import org.junit.Test;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.concurrent.TimeoutException;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 @SuppressWarnings("deprecation")
-public class TestMRCJCFileInputFormat {
+public class TestMRCJCFileInputFormat extends TestCase {
 
   Configuration conf = new Configuration();
   MiniDFSCluster dfs = null;
@@ -52,7 +50,6 @@ public class TestMRCJCFileInputFormat {
         .build();
   }
 
-  @Test
   public void testLocality() throws Exception {
     JobConf job = new JobConf(conf);
     dfs = newDFSCluster(job);
@@ -112,7 +109,6 @@ public class TestMRCJCFileInputFormat {
     DFSTestUtil.waitReplication(fs, path, replication);
   }
 
-  @Test
   public void testNumInputs() throws Exception {
     JobConf job = new JobConf(conf);
     dfs = newDFSCluster(job);
@@ -161,7 +157,6 @@ public class TestMRCJCFileInputFormat {
     }
   }
 
-  @Test
   public void testMultiLevelInput() throws Exception {
     JobConf job = new JobConf(conf);
 
@@ -200,7 +195,6 @@ public class TestMRCJCFileInputFormat {
   }
   }
 
 
   @SuppressWarnings("rawtypes")
   @SuppressWarnings("rawtypes")
-  @Test
   public void testLastInputSplitAtSplitBoundary() throws Exception {
   public void testLastInputSplitAtSplitBoundary() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(1024l * 1024 * 1024,
     FileInputFormat fif = new FileInputFormatForTest(1024l * 1024 * 1024,
         128l * 1024 * 1024);
         128l * 1024 * 1024);
@@ -214,7 +208,6 @@ public class TestMRCJCFileInputFormat {
   }
   }
 
 
   @SuppressWarnings("rawtypes")
   @SuppressWarnings("rawtypes")
-  @Test
   public void testLastInputSplitExceedingSplitBoundary() throws Exception {
   public void testLastInputSplitExceedingSplitBoundary() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(1027l * 1024 * 1024,
     FileInputFormat fif = new FileInputFormatForTest(1027l * 1024 * 1024,
         128l * 1024 * 1024);
         128l * 1024 * 1024);
@@ -228,7 +221,6 @@ public class TestMRCJCFileInputFormat {
   }
   }
 
 
   @SuppressWarnings("rawtypes")
   @SuppressWarnings("rawtypes")
-  @Test
   public void testLastInputSplitSingleSplit() throws Exception {
   public void testLastInputSplitSingleSplit() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(100l * 1024 * 1024,
     FileInputFormat fif = new FileInputFormatForTest(100l * 1024 * 1024,
         128l * 1024 * 1024);
         128l * 1024 * 1024);
@@ -313,7 +305,7 @@ public class TestMRCJCFileInputFormat {
     DFSTestUtil.waitReplication(fileSys, name, replication);
     DFSTestUtil.waitReplication(fileSys, name, replication);
   }
   }
 
 
-  @After
+  @Override
   public void tearDown() throws Exception {
   public void tearDown() throws Exception {
     if (dfs != null) {
     if (dfs != null) {
       dfs.shutdown();
       dfs.shutdown();

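The tearDown change at the end of this file is the lifecycle half of the migration: JUnit 4's @After has no meaning under JUnit 3, where hooks come from overriding TestCase's setUp()/tearDown(), which run around every test method. A sketch of the restored idiom (hypothetical resource; assumes JUnit 3.8):

    import junit.framework.TestCase;

    public class LifecycleSketchTest extends TestCase {
      private StringBuilder resource;

      @Override
      protected void setUp() throws Exception {
        resource = new StringBuilder("ready");  // runs before each test method
      }

      @Override
      protected void tearDown() throws Exception {
        resource = null;                        // runs after each test method
      }

      public void testResourceIsReady() {
        assertEquals("ready", resource.toString());
      }
    }

Marking the restored tearDown() with @Override, as the hunk above does, is a cheap compile-time check that the signature still matches the framework method.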
+ 9 - 19
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java

@@ -18,25 +18,18 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.JobStatus;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
+import java.io.*;
 import java.net.URI;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapred.JobContextImpl;
+import org.apache.hadoop.mapred.TaskAttemptContextImpl;
+import org.apache.hadoop.mapreduce.JobStatus;
 
-public class TestMRCJCFileOutputCommitter {
+public class TestMRCJCFileOutputCommitter extends TestCase {
   private static Path outDir = new Path(
      System.getProperty("test.build.data", "/tmp"), "output");
 
@@ -74,7 +67,6 @@ public class TestMRCJCFileOutputCommitter {
   }
 
   @SuppressWarnings("unchecked")
-  @Test
   public void testCommitter() throws Exception {
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
@@ -116,7 +108,6 @@ public class TestMRCJCFileOutputCommitter {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
-  @Test
   public void testAbort() throws IOException {
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
@@ -170,7 +161,6 @@ public class TestMRCJCFileOutputCommitter {
     }
   }
 
-  @Test
   public void testFailAbort() throws IOException {
     JobConf job = new JobConf();
     job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");

+ 4 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java

@@ -22,6 +22,8 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
@@ -38,8 +40,6 @@ import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.split.JobSplitWriter;
 import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
 
 /**
  *  Validates map phase progress.
@@ -55,7 +55,7 @@ import static org.junit.Assert.assertTrue;
  *  once mapTask.run() is finished. Sort phase progress in map task is not
  *  validated here.
  */
-public class TestMapProgress {
+public class TestMapProgress extends TestCase {
   public static final Log LOG = LogFactory.getLog(TestMapProgress.class);
   private static String TEST_ROOT_DIR;
   static {
@@ -234,8 +234,7 @@ public class TestMapProgress {
   /**
    *  Validates map phase progress after each record is processed by map task
    *  using custom task reporter.
-   */
-  @Test
+   */ 
   public void testMapProgress() throws Exception {
     JobConf job = new JobConf();
     fs = FileSystem.getLocal(job);

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java

@@ -44,8 +44,8 @@ import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 
 import org.apache.hadoop.mapred.Task.TaskReporter;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+
+import junit.framework.TestCase;
 
 @SuppressWarnings(value={"unchecked", "deprecation"})
 /**
@@ -56,7 +56,7 @@ import static org.junit.Assert.assertEquals;
  * framework's merge on the reduce side will merge the partitions created to
  * generate the final output which is sorted on the key.
  */
-public class TestMerge {
+public class TestMerge extends TestCase {
   private static final int NUM_HADOOP_DATA_NODES = 2;
   // Number of input files is same as the number of mappers.
   private static final int NUM_MAPPERS = 10;
@@ -69,7 +69,6 @@ public class TestMerge {
   // Where output goes.
   private static final Path OUTPUT = new Path("/testplugin/output");
 
-  @Test
   public void testMerge() throws Exception {
     MiniDFSCluster dfsCluster = null;
     MiniMRClientCluster mrCluster = null;

+ 2 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java

@@ -18,16 +18,14 @@
 
 package org.apache.hadoop.mapred;
 
-import org.junit.Test;
-
 import java.io.IOException;
+import junit.framework.TestCase;
 
 /**
  * A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster.
 */
-public class TestMiniMRBringup {
+public class TestMiniMRBringup extends TestCase {
 
-  @Test
   public void testBringUp() throws IOException {
     MiniMRCluster mr = null;
     try {

+ 8 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java

@@ -18,23 +18,20 @@
 
 package org.apache.hadoop.mapred;
 
+import java.io.*;
+import junit.framework.TestCase;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.MRCaching.TestResult;
 import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
 
 /**
  * A JUnit test to test caching with DFS
 * 
 */
 @Ignore
-public class TestMiniMRDFSCaching {
+public class TestMiniMRDFSCaching extends TestCase {
 
-  @Test
   public void testWithDFS() throws IOException {
     MiniMRCluster mr = null;
     MiniDFSCluster dfs = null;
@@ -73,4 +70,9 @@ public class TestMiniMRDFSCaching {
       }
     }
   }
+
+  public static void main(String[] argv) throws Exception {
+    TestMiniMRDFSCaching td = new TestMiniMRDFSCaching();
+    td.testWithDFS();
+  }
 }

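The restored main() simply instantiates the TestCase and calls one test method by hand, which bypasses setUp()/tearDown() and result reporting. A more conventional JUnit 3 entry point would hand the suite to the text runner (sketch, assuming JUnit 3.8 and the same package as the test class):

    import junit.framework.TestSuite;
    import junit.textui.TestRunner;

    public class RunnerSketch {
      public static void main(String[] args) {
        // Drives the full test lifecycle and prints a pass/fail summary.
        TestRunner.run(new TestSuite(TestMiniMRDFSCaching.class));
      }
    }

Either form works for ad-hoc local runs; the direct call is just the simplest thing that compiles.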
+ 11 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java

@@ -21,17 +21,17 @@ import java.io.IOException;
 import java.util.BitSet;
 import java.util.HashMap;
 import java.util.Random;
+
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 
-public class TestMultiFileInputFormat {
+public class TestMultiFileInputFormat extends TestCase{
 
   private static JobConf job = new JobConf();
 
@@ -79,8 +79,7 @@ public class TestMultiFileInputFormat {
     FileInputFormat.setInputPaths(job, multiFileDir);
     return multiFileDir;
   }
-
-  @Test
+  
   public void testFormat() throws IOException {
     LOG.info("Test started");
     LOG.info("Max split count           = " + MAX_SPLIT_COUNT);
@@ -123,8 +122,7 @@ public class TestMultiFileInputFormat {
     }
     LOG.info("Test Finished");
   }
-
-  @Test
+  
   public void testFormatWithLessPathsThanSplits() throws Exception {
     MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
     FileSystem fs = FileSystem.getLocal(job);     
@@ -137,4 +135,9 @@ public class TestMultiFileInputFormat {
     initFiles(fs, 2, 500);
     assertEquals(2, format.getSplits(job, 4).length);
   }
+  
+  public static void main(String[] args) throws Exception{
+    TestMultiFileInputFormat test = new TestMultiFileInputFormat();
+    test.testFormat();
+  }
 }

+ 3 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java

@@ -27,19 +27,16 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Arrays;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 /**
  * 
  * test MultiFileSplit class
 */
-public class TestMultiFileSplit {
+public class TestMultiFileSplit extends TestCase{
 
-    @Test
     public void testReadWrite() throws Exception {
       MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200});
        
@@ -73,7 +70,6 @@ public class TestMultiFileSplit {
     * test method getLocations
     * @throws IOException
     */
-    @Test
     public void testgetLocations() throws IOException{
        JobConf job= new JobConf();
      

+ 5 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java

@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.mapred;
 
+import java.io.IOException;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -28,17 +32,12 @@ import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.junit.Ignore;
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
 
 /**
  * This test checks whether the task caches are created and used properly.
 */
 @Ignore
-public class TestMultipleLevelCaching {
+public class TestMultipleLevelCaching extends TestCase {
   private static final int MAX_LEVEL = 5;
   final Path inDir = new Path("/cachetesting");
   final Path outputPath = new Path("/output");
@@ -72,7 +71,6 @@ public class TestMultipleLevelCaching {
     return rack.toString();
   }
 
-  @Test
   public void testMultiLevelCaching() throws Exception {
     for (int i = 1 ; i <= MAX_LEVEL; ++i) {
       testCachingAtLevel(i);

+ 11 - 12
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java

@@ -18,19 +18,15 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
-import org.junit.Test;
+import java.io.*;
+import junit.framework.TestCase;
 
-import java.io.File;
-import java.io.IOException;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import org.apache.hadoop.mapred.lib.*;
 
-public class TestMultipleTextOutputFormat {
+public class TestMultipleTextOutputFormat extends TestCase {
   private static JobConf defaultConf = new JobConf();
 
   private static FileSystem localFs = null;
@@ -87,8 +83,7 @@ public class TestMultipleTextOutputFormat {
     writeData(rw);
     rw.close(null);
   }
-
-  @Test
+  
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -150,4 +145,8 @@ public class TestMultipleTextOutputFormat {
     //System.out.printf("File_2 output: %s\n", output);
     assertEquals(output, expectedOutput.toString());
   }
+
+  public static void main(String[] args) throws Exception {
+    new TestMultipleTextOutputFormat().testFormat();
+  }
 }

+ 4 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java

@@ -19,18 +19,17 @@
 package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.mapreduce.TaskCounter;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 public class TestReduceFetch extends TestReduceFetchFromPartialMem {
 
+  static {
+    setSuite(TestReduceFetch.class);
+  }
+
   /**
    * Verify that all segments are read from disk
    * @throws Exception might be thrown
   */
-  @Test
   public void testReduceFromDisk() throws Exception {
     final int MAP_TASKS = 8;
     JobConf job = mrCluster.createJobConf();
@@ -54,7 +53,6 @@ public class TestReduceFetch extends TestReduceFetchFromPartialMem {
   * Verify that no segment hits disk.
   * @throws Exception might be thrown
   */
-  @Test
   public void testReduceFromMem() throws Exception {
     final int MAP_TASKS = 3;
     JobConf job = mrCluster.createJobConf();

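The static setSuite(...) call is the one subtle piece of this file: TestReduceFetch inherits its suite() (defined in TestReduceFetchFromPartialMem, next file) from a static mySuite field, and the subclass's static initializer re-points that field at TestReduceFetch.class so the shared one-time cluster setup wraps this class's test methods rather than the parent's. A reduced sketch of the redirection (hypothetical classes; assumes JUnit 3.8):

    import junit.framework.Test;
    import junit.framework.TestCase;
    import junit.framework.TestSuite;

    public class ParentSketchTest extends TestCase {
      protected static TestSuite mySuite = new TestSuite(ParentSketchTest.class);

      protected static void setSuite(Class<? extends TestCase> klass) {
        mySuite = new TestSuite(klass);   // subclasses swap in their own class
      }

      public static Test suite() {
        return mySuite;   // the real code wraps this in a TestSetup (next file)
      }

      public void testSomething() { assertTrue(true); }
    }

The pattern relies on the subclass being loaded (running its static block) before suite() is invoked, which holds when the runner starts from the subclass.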
+ 27 - 19
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java

@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.mapred;
 
+import junit.extensions.TestSetup;
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -26,9 +30,7 @@ import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.TaskCounter;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.hadoop.mapreduce.MRConfig;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -37,27 +39,34 @@ import java.util.Arrays;
 import java.util.Formatter;
 import java.util.Iterator;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-public class TestReduceFetchFromPartialMem {
+public class TestReduceFetchFromPartialMem extends TestCase {
 
   protected static MiniMRCluster mrCluster = null;
   protected static MiniDFSCluster dfsCluster = null;
+  protected static TestSuite mySuite;
 
-  @Before
-  public void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-    mrCluster = new MiniMRCluster(2,
-      dfsCluster.getFileSystem().getUri().toString(), 1);
+  protected static void setSuite(Class<? extends TestCase> klass) {
+    mySuite  = new TestSuite(klass);
   }
 
-  @After
-  public void tearDown() throws Exception {
-    if (dfsCluster != null) { dfsCluster.shutdown(); }
-    if (mrCluster != null) { mrCluster.shutdown(); }
+  static {
+    setSuite(TestReduceFetchFromPartialMem.class);
+  }
+  
+  public static Test suite() {
+    TestSetup setup = new TestSetup(mySuite) {
+      protected void setUp() throws Exception {
+        Configuration conf = new Configuration();
+        dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+        mrCluster = new MiniMRCluster(2,
+            dfsCluster.getFileSystem().getUri().toString(), 1);
+      }
+      protected void tearDown() throws Exception {
+        if (dfsCluster != null) { dfsCluster.shutdown(); }
+        if (mrCluster != null) { mrCluster.shutdown(); }
+      }
+    };
+    return setup;
   }
 
   private static final String tagfmt = "%04d";
@@ -69,7 +78,6 @@ public class TestReduceFetchFromPartialMem {
   }
 
   /** Verify that at least one segment does not hit disk */
-  @Test
   public void testReduceFromPartialMem() throws Exception {
     final int MAP_TASKS = 7;
     JobConf job = mrCluster.createJobConf();

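The suite() method restored above is the JUnit 3 replacement for the reverted @Before/@After pair: junit.extensions.TestSetup is a decorator whose setUp()/tearDown() run once around the whole wrapped suite, so the expensive mini DFS/MR clusters are built a single time and shared by all the fetch tests instead of being rebuilt per test method. A self-contained sketch of the decorator (hypothetical shared resource; assumes JUnit 3.8):

    import junit.extensions.TestSetup;
    import junit.framework.Test;
    import junit.framework.TestCase;
    import junit.framework.TestSuite;

    public class OneTimeSetupSketchTest extends TestCase {
      static String sharedResource;   // stands in for the mini-clusters

      public static Test suite() {
        return new TestSetup(new TestSuite(OneTimeSetupSketchTest.class)) {
          protected void setUp() { sharedResource = "up"; }    // once, before all tests
          protected void tearDown() { sharedResource = null; } // once, after all tests
        };
      }

      public void testSeesSharedResource() {
        assertEquals("up", sharedResource);
      }
    }

The trade-off is the usual one for shared fixtures: faster suites, but tests must not mutate the fixture in ways later tests can observe.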
+ 7 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java

@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.mapred;
 
+import java.io.IOException;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -26,17 +30,11 @@ import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 /**
  * This test exercises the ValueIterator.
 */
-public class TestReduceTask {
+public class TestReduceTask extends TestCase {
 
   static class NullProgress implements Progressable {
     public void progress() { }
@@ -121,10 +119,9 @@ public class TestReduceTask {
     }
     assertEquals(vals.length, i);
     // make sure we have progress equal to 1.0
-    assertEquals(1.0f, rawItr.getProgress().get(),0.0000);
+    assertEquals(1.0f, rawItr.getProgress().get());
   }
 
-  @Test
   public void testValueIterator() throws Exception {
     Path tmpDir = new Path("build/test/test.reduce.task");
     Configuration conf = new Configuration();
@@ -132,8 +129,7 @@ public class TestReduceTask {
       runValueIterator(tmpDir, testCase, conf, null);
     }
   }
-
-  @Test
+  
   public void testValueIteratorWithCompression() throws Exception {
     Path tmpDir = new Path("build/test/test.reduce.task.compression");
     Configuration conf = new Configuration();

+ 6 - 13
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java

@@ -18,26 +18,19 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
 import java.io.IOException;
 import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+
+import junit.framework.TestCase;
+import org.apache.commons.logging.*;
 
-public class TestSequenceFileAsBinaryInputFormat {
+public class TestSequenceFileAsBinaryInputFormat extends TestCase {
   private static final Log LOG = FileInputFormat.LOG;
   private static final int RECORDS = 10000;
 
-  @Test
   public void testBinary() throws IOException {
     JobConf job = new JobConf();
     FileSystem fs = FileSystem.getLocal(job);

+ 9 - 22
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java

@@ -18,35 +18,24 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.DoubleWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.junit.Test;
-
 import java.io.IOException;
 import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
 
-public class TestSequenceFileAsBinaryOutputFormat {
+import junit.framework.TestCase;
+import org.apache.commons.logging.*;
+
+public class TestSequenceFileAsBinaryOutputFormat extends TestCase {
   private static final Log LOG =
       LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName());
+
   private static final int RECORDS = 10000;
   // A random task attempt id for testing.
   private static final String attempt = "attempt_200707121733_0001_m_000000_0";
 
-  @Test
   public void testBinary() throws IOException {
     JobConf job = new JobConf();
     FileSystem fs = FileSystem.getLocal(job);
@@ -140,8 +129,7 @@ public class TestSequenceFileAsBinaryOutputFormat {
     assertEquals("Some records not found", RECORDS, count);
   }
 
-  @Test
-  public void testSequenceOutputClassDefaultsToMapRedOutputClass()
+  public void testSequenceOutputClassDefaultsToMapRedOutputClass() 
          throws IOException {
     JobConf job = new JobConf();
     FileSystem fs = FileSystem.getLocal(job);
@@ -175,7 +163,6 @@ public class TestSequenceFileAsBinaryOutputFormat {
                                                                          job));
   }
 
-  @Test
   public void testcheckOutputSpecsForbidRecordCompression() throws IOException {
     JobConf job = new JobConf();
     FileSystem fs = FileSystem.getLocal(job);

+ 15 - 18
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java

@@ -18,29 +18,22 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import java.util.BitSet;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-
-public class TestSequenceFileAsTextInputFormat {
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.*;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
+
+public class TestSequenceFileAsTextInputFormat extends TestCase {
   private static final Log LOG = FileInputFormat.LOG;
 
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();
 
-  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf(conf);
     FileSystem fs = FileSystem.getLocal(conf);
@@ -119,4 +112,8 @@ public class TestSequenceFileAsTextInputFormat {
 
     }
   }
+
+  public static void main(String[] args) throws Exception {
+    new TestSequenceFileAsTextInputFormat().testFormat();
+  }
 }

+ 15 - 17
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java

@@ -18,21 +18,17 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
 
-import java.io.IOException;
-import java.util.Random;
+import org.apache.commons.logging.*;
 
-import static org.junit.Assert.assertEquals;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
 
-public class TestSequenceFileInputFilter {
+public class TestSequenceFileInputFilter extends TestCase {
   private static final Log LOG = FileInputFormat.LOG;
 
   private static final int MAX_LENGTH = 15000;
@@ -101,8 +97,7 @@ public class TestSequenceFileInputFilter {
     }
     return count;
   }
-
-  @Test
+  
   public void testRegexFilter() throws Exception {
     // set the filter class
     LOG.info("Testing Regex Filter with patter: \\A10*");
@@ -126,7 +121,6 @@ public class TestSequenceFileInputFilter {
     fs.delete(inDir, true);
   }
 
-  @Test
   public void testPercentFilter() throws Exception {
     LOG.info("Testing Percent Filter with frequency: 1000");
     // set the filter class
@@ -153,8 +147,7 @@ public class TestSequenceFileInputFilter {
     // clean up
     fs.delete(inDir, true);
   }
-
-  @Test
+  
   public void testMD5Filter() throws Exception {
     // set the filter class
     LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -175,4 +168,9 @@ public class TestSequenceFileInputFilter {
     // clean up
     fs.delete(inDir, true);
   }
+
+  public static void main(String[] args) throws Exception {
+    TestSequenceFileInputFilter filter = new TestSequenceFileInputFilter();
+    filter.testRegexFilter();
+  }
 }

+ 15 - 17
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java

@@ -18,28 +18,22 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.junit.Test;
-
-import java.util.BitSet;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-
-public class TestSequenceFileInputFormat {
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.*;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
+
+public class TestSequenceFileInputFormat extends TestCase {
   private static final Log LOG = FileInputFormat.LOG;
 
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();
 
-  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf(conf);
     FileSystem fs = FileSystem.getLocal(conf);
@@ -116,4 +110,8 @@ public class TestSequenceFileInputFormat {
 
     }
   }
+
+  public static void main(String[] args) throws Exception {
+    new TestSequenceFileInputFormat().testFormat();
+  }
 }

+ 8 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java

@@ -17,20 +17,18 @@
  */
 package org.apache.hadoop.mapred;
 
+import java.util.Iterator;
+
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.SortedRanges.Range;
-import org.junit.Test;
-
-import java.util.Iterator;
-
-import static org.junit.Assert.assertEquals;
 
-public class TestSortedRanges {
-  private static final Log LOG =
+public class TestSortedRanges extends TestCase {
+  private static final Log LOG = 
     LogFactory.getLog(TestSortedRanges.class);
-
-  @Test
+  
   public void testAdd() {
     SortedRanges sr = new SortedRanges();
     sr.add(new Range(2,9));
@@ -68,8 +66,7 @@ public class TestSortedRanges {
     assertEquals(77, it.next().longValue());
     
   }
-
-  @Test
+  
   public void testRemove() {
     SortedRanges sr = new SortedRanges();
     sr.add(new Range(2,19));

+ 10 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java

@@ -18,6 +18,12 @@
 
 package org.apache.hadoop.mapred;
 
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.net.URI;
+
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -28,20 +34,14 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.net.URI;
-
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
 /**
  * A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
 */
-public class TestSpecialCharactersInOutputPath {
+public class TestSpecialCharactersInOutputPath extends TestCase {
   private static final Log LOG =
     LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName());
   
@@ -96,8 +96,7 @@ public class TestSpecialCharactersInOutputPath {
     LOG.info("job is complete: " + runningJob.isSuccessful());
     return (runningJob.isSuccessful());
   }
-
-  @Test
+  
   public void testJobWithDFS() throws IOException {
     String namenode = null;
     MiniDFSCluster dfs = null;

+ 3 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java

@@ -19,18 +19,14 @@ package org.apache.hadoop.mapred;
 
 import java.util.Map;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
 import org.apache.hadoop.mapred.StatisticsCollector.Stat;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
 
-public class TestStatisticsCollector {
+public class TestStatisticsCollector extends TestCase{
 
   @SuppressWarnings("rawtypes")
-  @Test
   public void testMovingWindow() throws Exception {
     StatisticsCollector collector = new StatisticsCollector(1);
     TimeWindow window = new TimeWindow("test", 6, 2);

+ 11 - 13
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java

@@ -17,15 +17,6 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.lib.IdentityMapper;
-import org.apache.hadoop.mapred.lib.IdentityReducer;
-import org.junit.Test;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
@@ -35,10 +26,18 @@ import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
 
-public class TestUserDefinedCounters {
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+
+public class TestUserDefinedCounters extends TestCase {
+  
   private static String TEST_ROOT_DIR =
     new File(System.getProperty("test.build.data", "/tmp")).toURI()
     .toString().replace(' ', '+')
@@ -76,7 +75,6 @@ public class TestUserDefinedCounters {
     wr.close();
   }
 
-  @Test
   public void testMapReduceJob() throws Exception {
 
     JobConf conf = new JobConf(TestUserDefinedCounters.class);

+ 8 - 12
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java

@@ -18,6 +18,12 @@
 
 package org.apache.hadoop.mapred;
 
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -25,15 +31,8 @@ import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 import org.apache.hadoop.util.GenericsUtil;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-import static org.junit.Assert.assertTrue;
 
-public class TestWritableJobConf {
+public class TestWritableJobConf extends TestCase {
 
   private static final Configuration CONF = new Configuration();
 
@@ -79,17 +78,15 @@ public class TestWritableJobConf {
       }
     }
 
-    assertTrue(map1.equals(map2));
+    assertEquals(map1, map2);
   }
 
-  @Test
   public void testEmptyConfiguration() throws Exception {
     JobConf conf = new JobConf();
     Configuration deser = serDeser(conf);
     assertEquals(conf, deser);
   }
 
-  @Test
   public void testNonEmptyConfiguration() throws Exception {
     JobConf conf = new JobConf();
     conf.set("a", "A");
@@ -98,7 +95,6 @@ public class TestWritableJobConf {
     assertEquals(conf, deser);
   }
 
-  @Test
   public void testConfigurationWithDefaults() throws Exception {
     JobConf conf = new JobConf(false);
     conf.set("a", "A");

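One small improvement rides along with this file's migration: assertTrue(map1.equals(map2)) became assertEquals(map1, map2). Both pass and fail in the same cases, but on failure assertEquals reports the two differing values, while the assertTrue form raises a bare AssertionFailedError with no detail. A sketch of the difference (hypothetical test; assumes JUnit 3.8):

    import java.util.HashMap;
    import java.util.Map;
    import junit.framework.TestCase;

    public class AssertSketchTest extends TestCase {
      public void testMapsMatch() {
        Map<String, String> expected = new HashMap<String, String>();
        Map<String, String> actual = new HashMap<String, String>();
        expected.put("a", "A");
        actual.put("a", "A");
        // Preferred: a failure here would print both maps in the message.
        assertEquals(expected, actual);
        // Equivalent check, but a failure carries no diagnostic detail:
        assertTrue(expected.equals(actual));
      }
    }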
+ 3 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java

@@ -18,10 +18,6 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.doReturn;
@@ -42,6 +38,8 @@ import java.security.PrivilegedExceptionAction;
 import java.util.List;
 import java.util.Map;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -115,7 +113,7 @@ import org.mockito.stubbing.Answer;
 * Test YarnRunner and make sure the client side plugin works
 * fine
 */
-public class TestYARNRunner {
+public class TestYARNRunner extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
   private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 

+ 19 - 23
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java

@@ -22,6 +22,11 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Iterator;
 
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import junit.extensions.TestSetup;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -49,27 +54,23 @@ import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
 
-public class TestDatamerge {
+public class TestDatamerge extends TestCase {
 
   private static MiniDFSCluster cluster = null;
-
-  @Before
-  public void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-  }
-  @After
-  public void tearDown() throws Exception {
-    if (cluster != null) {
-      cluster.shutdown();
-    }
+  public static Test suite() {
+    TestSetup setup = new TestSetup(new TestSuite(TestDatamerge.class)) {
+      protected void setUp() throws Exception {
+        Configuration conf = new Configuration();
+        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+      }
+      protected void tearDown() throws Exception {
+        if (cluster != null) {
+          cluster.shutdown();
+        }
+      }
+    };
+    return setup;
   }
 
   private static SequenceFile.Writer[] createWriters(Path testdir,
@@ -245,22 +246,18 @@ public class TestDatamerge {
     base.getFileSystem(job).delete(base, true);
   }
 
-  @Test
   public void testSimpleInnerJoin() throws Exception {
     joinAs("inner", InnerJoinChecker.class);
   }
 
-  @Test
   public void testSimpleOuterJoin() throws Exception {
     joinAs("outer", OuterJoinChecker.class);
   }
 
-  @Test
   public void testSimpleOverride() throws Exception {
     joinAs("override", OverrideChecker.class);
   }
 
-  @Test
   public void testNestedJoin() throws Exception {
     // outer(inner(S1,...,Sn),outer(S1,...Sn))
     final int SOURCES = 3;
@@ -353,7 +350,6 @@ public class TestDatamerge {
 
   }
 
-  @Test
   public void testEmptyJoin() throws Exception {
     JobConf job = new JobConf();
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));

+ 7 - 17
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java

@@ -26,6 +26,8 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Random;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -34,12 +36,8 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 
-public class TestTupleWritable {
+public class TestTupleWritable extends TestCase {
 
   private TupleWritable makeTuple(Writable[] writs) {
     Writable[] sub1 = { writs[1], writs[2] };
@@ -102,7 +100,6 @@ public class TestTupleWritable {
     return i;
   }
 
-  @Test
   public void testIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -124,7 +121,6 @@ public class TestTupleWritable {
     verifIter(writs, t, 0);
   }
 
-  @Test
   public void testNestedIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -143,7 +139,6 @@ public class TestTupleWritable {
     assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
   }
 
-  @Test
   public void testWritable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -167,7 +162,6 @@ public class TestTupleWritable {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }
 
-  @Test
   public void testWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
     
@@ -186,8 +180,7 @@ public class TestTupleWritable {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-
-  @Test
+  
   public void testWideWritable2() throws Exception {
     Writable[] manyWrits = makeRandomWritables(71);
     
@@ -209,7 +202,6 @@ public class TestTupleWritable {
    * Tests a tuple writable with more than 64 values and the values set written
    * spread far apart.
    */
-  @Test
   public void testSparseWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
     
@@ -228,7 +220,7 @@ public class TestTupleWritable {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-  @Test
+  
   public void testWideTuple() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -248,7 +240,7 @@ public class TestTupleWritable {
       }
     }
   }
-  @Test
+  
   public void testWideTuple2() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -272,7 +264,6 @@ public class TestTupleWritable {
   /**
    * Tests that we can write more than 64 values.
   */
-  @Test
   public void testWideTupleBoundary() throws Exception {
     Text emptyText = new Text("Should not be set written");
     Writable[] values = new Writable[65];
     Text emptyText = new Text("Should not be set written");
     Writable[] values = new Writable[65];
     Writable[] values = new Writable[65];
@@ -296,7 +287,6 @@ public class TestTupleWritable {
   /**
   /**
    * Tests compatibility with pre-0.21 versions of TupleWritable
    * Tests compatibility with pre-0.21 versions of TupleWritable
    */
    */
-  @Test
   public void testPreVersion21Compatibility() throws Exception {
   public void testPreVersion21Compatibility() throws Exception {
     Writable[] manyWrits = makeRandomWritables(64);
     Writable[] manyWrits = makeRandomWritables(64);
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
@@ -314,7 +304,7 @@ public class TestTupleWritable {
     assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
     assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
   }
-  @Test
+  
   public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
   public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
     Writable[] manyWrits = new Writable[0];
     Writable[] manyWrits = new Writable[0];
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java

@@ -21,6 +21,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
@@ -33,16 +35,13 @@ import org.apache.hadoop.mapred.JobConfigurable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
 
-public class TestWrappedRecordReaderClassloader {
+public class TestWrappedRecordReaderClassloader extends TestCase {
   /**
    * Tests the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
    * is inherited by any {@link WrappedRecordReader}s created by
    * {@link CompositeRecordReader}
   */
-  @Test
   public void testClassLoader() throws Exception {
     JobConf job = new JobConf();
     Fake_ClassLoader classLoader = new Fake_ClassLoader();

+ 4 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java

@@ -20,6 +20,8 @@ package org.apache.hadoop.mapred.lib;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -30,12 +32,9 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
 
-public class TestDelegatingInputFormat {
-  @Test
+public class TestDelegatingInputFormat extends TestCase {
+
   public void testSplitting() throws Exception {
     JobConf conf = new JobConf();
     MiniDFSCluster dfs = null;

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java

@@ -20,14 +20,13 @@ package org.apache.hadoop.mapred.lib;
 
 import java.io.*;
 import java.util.*;
+import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
-public class TestLineInputFormat {
+public class TestLineInputFormat extends TestCase {
   private static int MAX_LENGTH = 200;
   
   private static JobConf defaultConf = new JobConf();
@@ -44,7 +43,7 @@ public class TestLineInputFormat {
   private static Path workDir = 
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestLineInputFormat");
-  @Test
+  
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     Path file = new Path(workDir, "test.txt");

+ 2 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java

@@ -36,6 +36,7 @@ import static org.junit.Assert.assertEquals;
  * @see TestDelegatingInputFormat
  */
 public class TestMultipleInputs {
+
   @Test
   public void testAddInputPathWithFormat() {
     final JobConf conf = new JobConf();
@@ -48,6 +49,7 @@ public class TestMultipleInputs {
     assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
        .getClass());
   }
+
   @Test
   public void testAddInputPathWithMapper() {
     final JobConf conf = new JobConf();

+ 3 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java

@@ -22,14 +22,13 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
+import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import java.text.NumberFormat;
 
-public class TestAggregates {
+public class TestAggregates extends TestCase {
 
   private static NumberFormat idFormat = NumberFormat.getInstance();
     static {
@@ -37,7 +36,7 @@ public class TestAggregates {
       idFormat.setGroupingUsed(false);
   }
 
-  @Test
+
   public void testAggregates() throws Exception {
     launch();
   }

+ 8 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java

@@ -19,13 +19,13 @@ package org.apache.hadoop.mapred.lib.db;
 
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.JobConf;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
 
-public class TestConstructQuery {
+public class TestConstructQuery extends TestCase {
+  
   private String[] fieldNames = new String[] { "id", "name", "value" };
   private String[] nullFieldNames = new String[] { null, null, null };
   private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);";
@@ -33,15 +33,15 @@ public class TestConstructQuery {
   
   private DBOutputFormat<DBWritable, NullWritable> format 
     = new DBOutputFormat<DBWritable, NullWritable>();
-  @Test
-  public void testConstructQuery() {
+  
+  public void testConstructQuery() {  
     String actual = format.constructQuery("hadoop_output", fieldNames);
     assertEquals(expected, actual);
-
+    
     actual = format.constructQuery("hadoop_output", nullFieldNames);
     assertEquals(nullExpected, actual);
   }
-  @Test
+  
   public void testSetOutput() throws IOException {
     JobConf job = new JobConf();
     DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
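
The deleted static imports in hunks like the one above need no replacement because junit.framework.TestCase inherits from junit.framework.Assert, so assertEquals, assertTrue, and the rest are already in scope for any subclass. A hypothetical fragment (not from the patch) that mirrors the constructQuery expectation shown above:

    // JUnit 3: assert methods are inherited, no static import required.
    public class QueryFormatTest extends junit.framework.TestCase {
      public void testQuery() {
        assertEquals("INSERT INTO t (a,b) VALUES (?,?);", buildQuery("t", "a", "b"));
      }
      // Hypothetical helper standing in for DBOutputFormat.constructQuery.
      private String buildQuery(String table, String... cols) {
        StringBuilder q = new StringBuilder("INSERT INTO " + table + " (");
        StringBuilder v = new StringBuilder(") VALUES (");
        for (int i = 0; i < cols.length; i++) {
          if (i > 0) { q.append(','); v.append(','); }
          q.append(cols[i]);
          v.append('?');
        }
        return q.append(v).append(");").toString();
      }
    }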

+ 3 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java

@@ -44,13 +44,10 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
 
+import junit.framework.TestCase;
 @Ignore
-public class TestPipes {
+public class TestPipes extends TestCase {
   private static final Log LOG =
     LogFactory.getLog(TestPipes.class.getName());
   
@@ -69,7 +66,7 @@ public class TestPipes {
     fs.delete(p, true);
     assertFalse("output not cleaned up", fs.exists(p));
   }
-  @Test
+
   public void testPipes() throws IOException {
     if (System.getProperty("compile.c++") == null) {
       LOG.info("compile.c++ is not defined, so skipping TestPipes");

+ 14 - 20
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java

@@ -17,42 +17,36 @@
  */
 package org.apache.hadoop.mapreduce;
 
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.fs.*;
 import org.apache.hadoop.mapred.LocalJobRunner;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
 
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import org.junit.Test;
+import junit.framework.TestCase;
 
 /**
  * Stress tests for the LocalJobRunner
 */
-public class TestLocalRunner {
+public class TestLocalRunner extends TestCase {
 
   private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);
 

+ 22 - 27
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java

@@ -17,23 +17,6 @@
  */
 package org.apache.hadoop.mapreduce;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.mapreduce.tools.CLI;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.junit.Test;
-
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -48,11 +31,23 @@ import java.io.PipedOutputStream;
 import java.io.PrintStream;
 import java.util.Arrays;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.mapreduce.tools.CLI;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  test CLI class. CLI class implemented  the Tool interface. 
@@ -108,7 +103,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
       throw new IOException();
     }
   }
-  @Test
+  
   public void testJobSubmissionSpecsAndFiles() throws Exception {
     Configuration conf = createJobConf();
     Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(),
@@ -132,7 +127,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
   /**
    * main test method
   */
-  @Test
+
   public void testJobClient() throws Exception {
     Configuration conf = createJobConf();
     Job job = runJob(conf);
@@ -185,7 +180,8 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
 
     runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    assertTrue(answer.contains("Killed task " + taid + " by failing it"));
+    Assert
+      .assertTrue(answer.contains("Killed task " + taid + " by failing it"));
   }
 
   /**
@@ -203,7 +199,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
 
     runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    assertTrue(answer.contains("Killed task " + taid));
+    Assert.assertTrue(answer.contains("Killed task " + taid));
   }
   
   /**
@@ -690,7 +686,6 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
    * Test -list option displays job name.
    * The name is capped to 20 characters for display.
   */
-  @Test
   public void testJobName() throws Exception {
     Configuration conf = createJobConf();
     CLI jc = createJobClient();

+ 4 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java

@@ -25,6 +25,8 @@ import java.io.Writer;
 import java.util.Arrays;
 import java.util.List;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -40,16 +42,13 @@ import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
 
 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part
  * files only if there is an explicit output.collect. This helps in preventing
  * 0 byte files
 */
-public class TestMapReduceLazyOutput {
+public class TestMapReduceLazyOutput extends TestCase {
   private static final int NUM_HADOOP_SLAVES = 3;
   private static final int NUM_MAPS_PER_NODE = 2;
   private static final Path INPUT = new Path("/testlazy/input");
@@ -123,7 +122,7 @@ public class TestMapReduceLazyOutput {
     }
   }
 
-  @Test
+
   public void testLazyOutput() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

+ 3 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java

@@ -27,6 +27,8 @@ import java.io.Writer;
 import java.util.ArrayList;
 import java.util.StringTokenizer;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -41,15 +43,12 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
 
 /**
  * A JUnit test to test the Map-Reduce framework's support for the
  * "mark-reset" functionality in Reduce Values Iterator
 */
-public class TestValueIterReset {
+public class TestValueIterReset extends TestCase {
   private static final int NUM_MAPS = 1;
   private static final int NUM_TESTS = 4;
   private static final int NUM_VALUES = 40;
@@ -519,7 +518,6 @@ public class TestValueIterReset {
     }
   }
 
-  @Test
   public void testValueIterReset() {
     try {
       Configuration conf = new Configuration();

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java

@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.mapreduce;
 
-import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -27,6 +26,7 @@ import static org.mockito.Mockito.doNothing;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -44,7 +44,8 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.junit.Test;
 
-public class TestYarnClientProtocolProvider {
+public class TestYarnClientProtocolProvider extends TestCase {
+  
   private static final RecordFactory recordFactory = RecordFactoryProvider.
       getRecordFactory(null);
   

+ 14 - 9
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java

@@ -18,24 +18,22 @@
 package org.apache.hadoop.mapreduce.lib.aggregate;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.junit.Test;
 
+import junit.framework.TestCase;
+import java.io.*;
 import java.text.NumberFormat;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class TestMapReduceAggregates {
+public class TestMapReduceAggregates extends TestCase {
 
   private static NumberFormat idFormat = NumberFormat.getInstance();
     static {
@@ -43,7 +41,7 @@ public class TestMapReduceAggregates {
       idFormat.setGroupingUsed(false);
   }
 
-  @Test
+
   public void testAggregates() throws Exception {
     launch();
   }
@@ -124,4 +122,11 @@ public class TestMapReduceAggregates {
     fs.delete(OUTPUT_DIR, true);
     fs.delete(INPUT_DIR, true);
   }
+  
+  /**
+   * Launches all the tasks in order.
+   */
+  public static void main(String[] argv) throws Exception {
+    launch();
+  }
 }
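
The re-added main methods in this and the later hunks restore the old convention of running a single test class straight from the command line. Under JUnit 3 the text-mode runner gives the same effect; a small sketch, assuming a JUnit 3.8-era classpath (hypothetical driver, not from the patch):

    // Run one JUnit 3 test class from a plain main(), without a build tool.
    public class RunOne {
      public static void main(String[] args) {
        // Wraps the class in a TestSuite and prints results to the console.
        junit.textui.TestRunner.run(TestMapReduceAggregates.class);
      }
    }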

+ 7 - 10
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java

@@ -19,15 +19,14 @@ package org.apache.hadoop.mapreduce.lib.db;
 
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
 
-public class TestDBOutputFormat {
+public class TestDBOutputFormat extends TestCase {
+  
   private String[] fieldNames = new String[] { "id", "name", "value" };
   private String[] nullFieldNames = new String[] { null, null, null };
   private String expected = "INSERT INTO hadoop_output " +
@@ -36,17 +35,15 @@ public class TestDBOutputFormat {
   
   private DBOutputFormat<DBWritable, NullWritable> format 
     = new DBOutputFormat<DBWritable, NullWritable>();
-
-  @Test
-  public void testConstructQuery() {
+  
+  public void testConstructQuery() {  
     String actual = format.constructQuery("hadoop_output", fieldNames);
     assertEquals(expected, actual);
     
     actual = format.constructQuery("hadoop_output", nullFieldNames);
     assertEquals(nullExpected, actual);
   }
-
-  @Test
+  
   public void testSetOutput() throws IOException {
     Job job = Job.getInstance(new Configuration());
     DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);

+ 5 - 10
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java

@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.mapreduce.lib.db;
 
-import org.junit.Test;
-
+import java.io.IOException;
+import java.math.BigDecimal;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import junit.framework.TestCase;
 
-public class TestIntegerSplitter {
+public class TestIntegerSplitter extends TestCase {
   private long [] toLongArray(List<Long> in) {
     long [] out = new long[in.size()];
     for (int i = 0; i < in.size(); i++) {
@@ -70,14 +70,12 @@ public class TestIntegerSplitter {
     }
   }
 
-  @Test
   public void testEvenSplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(10, 0, 100);
     long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
-  @Test
   public void testOddSplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(10, 0, 95);
     long [] expected = { 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 95 };
@@ -85,14 +83,12 @@ public class TestIntegerSplitter {
 
   }
 
-  @Test
   public void testSingletonSplit() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(1, 5, 5);
     long [] expected = { 5, 5 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
-  @Test
   public void testSingletonSplit2() throws SQLException {
     // Same test, but overly-high numSplits
     List<Long> splits = new IntegerSplitter().split(5, 5, 5);
@@ -100,7 +96,6 @@ public class TestIntegerSplitter {
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
-  @Test
   public void testTooManySplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(5, 3, 5);
     long [] expected = { 3, 4, 5 };

+ 4 - 14
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java

@@ -17,16 +17,15 @@
  */
 package org.apache.hadoop.mapreduce.lib.db;
 
-import org.junit.Test;
-
+import java.io.IOException;
 import java.math.BigDecimal;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import junit.framework.TestCase;
 
-public class TestTextSplitter {
+public class TestTextSplitter extends TestCase {
 
   public String formatArray(Object [] ar) {
     StringBuilder sb = new StringBuilder();
@@ -63,56 +62,48 @@ public class TestTextSplitter {
     }
   }
 
-  @Test
   public void testStringConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     BigDecimal emptyBigDec = splitter.stringToBigDecimal("");
     assertEquals(BigDecimal.ZERO, emptyBigDec);
   }
 
-  @Test
   public void testBigDecConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO);
     assertEquals("", emptyStr);
   }
 
-  @Test
   public void testConvertA() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A"));
     assertEquals("A", out);
   }
 
-  @Test
   public void testConvertZ() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z"));
     assertEquals("Z", out);
   }
 
-  @Test
   public void testConvertThreeChars() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("abc"));
     assertEquals("abc", out);
   }
 
-  @Test
   public void testConvertStr() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("big str"));
     assertEquals("big str", out);
   }
 
-  @Test
   public void testConvertChomped() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("AVeryLongStringIndeed"));
     assertEquals("AVeryLon", out);
   }
 
-  @Test
   public void testAlphabetSplit() throws SQLException {
     // This should give us 25 splits, one per letter.
     TextSplitter splitter = new TextSplitter();
@@ -122,7 +113,6 @@ public class TestTextSplitter {
     assertArrayEquals(expected, splits.toArray(new String [0]));
   }
 
-  @Test
   public void testCommonPrefix() throws SQLException {
     // Splits between 'Hand' and 'Hardy'
     TextSplitter splitter = new TextSplitter();

+ 11 - 9
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java

@@ -18,19 +18,15 @@
 package org.apache.hadoop.mapreduce.lib.fieldsel;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.junit.Test;
 
+import junit.framework.TestCase;
 import java.text.NumberFormat;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class TestMRFieldSelection {
+public class TestMRFieldSelection extends TestCase {
 
 private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -38,7 +34,6 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     idFormat.setGroupingUsed(false);
   }
 
-  @Test
   public void testFieldSelection() throws Exception {
     launch();
   }
@@ -119,4 +114,11 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     System.out.println("ExpectedData:");
     System.out.println(expectedOutput.toString());
   }
+  
+  /**
+   * Launches all the tasks in order.
+   */
+  public static void main(String[] argv) throws Exception {
+    launch();
+  }
 }

+ 7 - 14
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java

@@ -18,12 +18,11 @@
 
 package org.apache.hadoop.mapreduce.lib.input;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -32,18 +31,12 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
 
-public class TestMRSequenceFileAsBinaryInputFormat {
+public class TestMRSequenceFileAsBinaryInputFormat extends TestCase {
   private static final int RECORDS = 10000;
 
-  @Test
   public void testBinary() throws IOException, InterruptedException {
     Job job = Job.getInstance();
     FileSystem fs = FileSystem.getLocal(job.getConfiguration());

+ 11 - 16
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java

@@ -18,13 +18,11 @@
 
 package org.apache.hadoop.mapreduce.lib.input;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
+import java.util.*;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -33,19 +31,12 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
-
-import java.util.BitSet;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import org.apache.hadoop.conf.*;
 
-public class TestMRSequenceFileAsTextInputFormat {
+public class TestMRSequenceFileAsTextInputFormat extends TestCase {
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();
 
-  @Test
   public void testFormat() throws Exception {
     Job job = Job.getInstance(conf);
     FileSystem fs = FileSystem.getLocal(conf);
@@ -121,4 +112,8 @@ public class TestMRSequenceFileAsTextInputFormat {
 
     }
   }
+
+  public static void main(String[] args) throws Exception {
+    new TestMRSequenceFileAsTextInputFormat().testFormat();
+  }
 }

+ 18 - 21
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java

@@ -18,14 +18,14 @@
 
 package org.apache.hadoop.mapreduce.lib.input;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.*;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -34,15 +34,10 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.Random;
+import org.apache.hadoop.conf.*;
 
-import static org.junit.Assert.assertEquals;
-
-public class TestMRSequenceFileInputFilter {
-  private static final Log LOG =
+public class TestMRSequenceFileInputFilter extends TestCase {
+  private static final Log LOG = 
     LogFactory.getLog(TestMRSequenceFileInputFilter.class.getName());
 
   private static final int MAX_LENGTH = 15000;
@@ -118,8 +113,7 @@ public class TestMRSequenceFileInputFilter {
     }
     return count;
   }
-
-  @Test
+  
   public void testRegexFilter() throws Exception {
     // set the filter class
     LOG.info("Testing Regex Filter with patter: \\A10*");
@@ -144,7 +138,6 @@ public class TestMRSequenceFileInputFilter {
     fs.delete(inDir, true);
   }
 
-  @Test
   public void testPercentFilter() throws Exception {
     LOG.info("Testing Percent Filter with frequency: 1000");
     // set the filter class
@@ -172,8 +165,7 @@ public class TestMRSequenceFileInputFilter {
     // clean up
     fs.delete(inDir, true);
   }
-
-  @Test
+  
   public void testMD5Filter() throws Exception {
     // set the filter class
     LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -195,4 +187,9 @@ public class TestMRSequenceFileInputFilter {
     // clean up
     fs.delete(inDir, true);
   }
+
+  public static void main(String[] args) throws Exception {
+    TestMRSequenceFileInputFilter filter = new TestMRSequenceFileInputFilter();
+    filter.testRegexFilter();
+  }
 }

+ 13 - 21
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java

@@ -18,28 +18,17 @@
 
 
 package org.apache.hadoop.mapreduce.lib.input;
 package org.apache.hadoop.mapreduce.lib.input;
 
 
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.MapContext;
-import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.List;
 
 
-import static org.junit.Assert.assertEquals;
-
-public class TestNLineInputFormat {
+public class TestNLineInputFormat extends TestCase {
   private static int MAX_LENGTH = 200;
   private static int MAX_LENGTH = 200;
   
   
   private static Configuration conf = new Configuration();
   private static Configuration conf = new Configuration();
@@ -56,8 +45,7 @@ public class TestNLineInputFormat {
   private static Path workDir = 
   private static Path workDir = 
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestNLineInputFormat");
              "TestNLineInputFormat");
-
-  @Test
+  
   public void testFormat() throws Exception {
   public void testFormat() throws Exception {
     Job job = Job.getInstance(conf);
     Job job = Job.getInstance(conf);
     Path file = new Path(workDir, "test.txt");
     Path file = new Path(workDir, "test.txt");
@@ -128,4 +116,8 @@ public class TestNLineInputFormat {
       }
       }
     }
     }
   }
   }
+  
+  public static void main(String[] args) throws Exception {
+    new TestNLineInputFormat().testFormat();
+  }
 }
 }

+ 22 - 30
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java

@@ -19,6 +19,11 @@ package org.apache.hadoop.mapreduce.lib.join;
 
 import java.io.IOException;
 
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import junit.extensions.TestSetup;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -32,31 +37,23 @@ import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
 
-public class TestJoinDatamerge {
+public class TestJoinDatamerge extends TestCase {
 
   private static MiniDFSCluster cluster = null;
-
-  @BeforeClass
-  public static void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-  }
-
-  @AfterClass
-  public static void tearDown() throws Exception {
-    if (cluster != null) {
-      cluster.shutdown();
-    }
+  public static Test suite() {
+    TestSetup setup = new TestSetup(new TestSuite(TestJoinDatamerge.class)) {
+      protected void setUp() throws Exception {
+        Configuration conf = new Configuration();
+        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+      }
+      protected void tearDown() throws Exception {
+        if (cluster != null) {
+          cluster.shutdown();
+        }
+      }
+    };
+    return setup;
   }
 
   private static SequenceFile.Writer[] createWriters(Path testdir,
@@ -114,7 +111,7 @@ public class TestJoinDatamerge {
       extends Mapper<IntWritable, V, IntWritable, IntWritable>{
     protected final static IntWritable one = new IntWritable(1);
     int srcs;
-
+    
     public void setup(Context context) {
       srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
       assertTrue("Invalid src count: " + srcs, srcs > 0);
@@ -126,7 +123,7 @@ public class TestJoinDatamerge {
     protected final static IntWritable one = new IntWritable(1);
 
     int srcs;
-
+    
     public void setup(Context context) {
       srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
       assertTrue("Invalid src count: " + srcs, srcs > 0);
@@ -275,12 +272,10 @@ public class TestJoinDatamerge {
     base.getFileSystem(conf).delete(base, true);
   }
 
-  @Test
   public void testSimpleInnerJoin() throws Exception {
     joinAs("inner", InnerJoinMapChecker.class, InnerJoinReduceChecker.class);
   }
 
-  @Test
   public void testSimpleOuterJoin() throws Exception {
     joinAs("outer", OuterJoinMapChecker.class, OuterJoinReduceChecker.class);
   }
@@ -327,13 +322,11 @@ public class TestJoinDatamerge {
     }
     return product;
   }
-
-  @Test
+  
   public void testSimpleOverride() throws Exception {
   public void testSimpleOverride() throws Exception {
     joinAs("override", OverrideMapChecker.class, OverrideReduceChecker.class);
     joinAs("override", OverrideMapChecker.class, OverrideReduceChecker.class);
   }
   }
 
 
-  @Test
   public void testNestedJoin() throws Exception {
   public void testNestedJoin() throws Exception {
     // outer(inner(S1,...,Sn),outer(S1,...Sn))
     // outer(inner(S1,...,Sn),outer(S1,...Sn))
     final int SOURCES = 3;
     final int SOURCES = 3;
@@ -429,7 +422,6 @@ public class TestJoinDatamerge {
 
 
   }
   }
 
 
-  @Test
   public void testEmptyJoin() throws Exception {
   public void testEmptyJoin() throws Exception {
     Configuration conf = new Configuration();
     Configuration conf = new Configuration();
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
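The suite() method reinstated above is the JUnit 3 replacement for @BeforeClass/@AfterClass: wrapping the TestSuite in a junit.extensions.TestSetup decorator runs one shared setUp/tearDown around the whole suite, so an expensive fixture such as the MiniDFSCluster is built once rather than per test. A minimal sketch of the pattern under hypothetical names (ExampleSuiteTest and its string fixture are illustrative only):

import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

public class ExampleSuiteTest extends TestCase {
  private static String sharedFixture;

  public static Test suite() {
    // TestSetup decorates the suite: setUp/tearDown run once around all tests.
    return new TestSetup(new TestSuite(ExampleSuiteTest.class)) {
      protected void setUp() throws Exception {
        sharedFixture = "expensive resource";
      }
      protected void tearDown() throws Exception {
        sharedFixture = null;
      }
    };
  }

  public void testFixtureIsShared() {
    assertEquals("expensive resource", sharedFixture);
  }
}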

+ 21 - 23
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java

@@ -20,6 +20,11 @@ package org.apache.hadoop.mapreduce.lib.join;
 import java.io.IOException;
 import java.util.List;
 
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import junit.extensions.TestSetup;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -31,14 +36,8 @@ import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
 
-public class TestJoinProperties {
+public class TestJoinProperties extends TestCase {
 
   private static MiniDFSCluster cluster = null;
   final static int SOURCES = 3;
@@ -47,19 +46,21 @@ public class TestJoinProperties {
   static Path[] src;
   static Path base;
 
-  @BeforeClass
-  public static void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-    base = cluster.getFileSystem().makeQualified(new Path("/nested"));
-    src = generateSources(conf);
-  }
-
-  @AfterClass
-  public static void tearDown() throws Exception {
-    if (cluster != null) {
-      cluster.shutdown();
-    }
+  public static Test suite() {
+    TestSetup setup = new TestSetup(new TestSuite(TestJoinProperties.class)) {
+      protected void setUp() throws Exception {
+        Configuration conf = new Configuration();
+        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+        base = cluster.getFileSystem().makeQualified(new Path("/nested"));
+        src = generateSources(conf);
+      }
+      protected void tearDown() throws Exception {
+        if (cluster != null) {
+          cluster.shutdown();
+        }
+      }
+    };
+    return setup;
   }
 
   // Sources from 0 to srcs-2 have IntWritable key and IntWritable value
@@ -232,7 +233,6 @@ public class TestJoinProperties {
   }
 
   // outer(outer(A, B), C) == outer(A,outer(B, C)) == outer(A, B, C)
-  @Test
   public void testOuterAssociativity() throws Exception {
     Configuration conf = new Configuration();
     testExpr1(conf, "outer", TestType.OUTER_ASSOCIATIVITY, 33);
@@ -241,7 +241,6 @@ public class TestJoinProperties {
   }
  
   // inner(inner(A, B), C) == inner(A,inner(B, C)) == inner(A, B, C)
-  @Test
   public void testInnerAssociativity() throws Exception {
     Configuration conf = new Configuration();
     testExpr1(conf, "inner", TestType.INNER_ASSOCIATIVITY, 2);
@@ -250,7 +249,6 @@ public class TestJoinProperties {
   }
 
   // override(inner(A, B), A) == A
-  @Test
   public void testIdentity() throws Exception {
     Configuration conf = new Configuration();
     testExpr4(conf);

+ 6 - 18
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java

@@ -24,6 +24,8 @@ import java.io.DataOutputStream
 import java.util.Arrays;
 import java.util.Random;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -31,13 +33,8 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
 
-public class TestJoinTupleWritable {
+public class TestJoinTupleWritable extends TestCase {
 
   private TupleWritable makeTuple(Writable[] writs) {
     Writable[] sub1 = { writs[1], writs[2] };
@@ -100,7 +97,6 @@ public class TestJoinTupleWritable {
     return i;
   }
 
-  @Test
   public void testIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -122,7 +118,6 @@ public class TestJoinTupleWritable {
     verifIter(writs, t, 0);
   }
 
-  @Test
   public void testNestedIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -141,7 +136,6 @@ public class TestJoinTupleWritable {
     assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
   }
 
-  @Test
   public void testWritable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -165,7 +159,6 @@ public class TestJoinTupleWritable {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }
 
-  @Test
   public void testWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
     
@@ -185,8 +178,7 @@ public class TestJoinTupleWritable {
     assertEquals("All tuple data has not been read from the stream", 
       -1, in.read());
   }
-
-  @Test
+  
   public void testWideWritable2() throws Exception {
     Writable[] manyWrits = makeRandomWritables(71);
     
@@ -209,7 +201,6 @@ public class TestJoinTupleWritable {
    * Tests a tuple writable with more than 64 values and the values set written
    * spread far apart.
    */
-  @Test
   public void testSparseWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
     
@@ -229,8 +220,7 @@ public class TestJoinTupleWritable {
     assertEquals("All tuple data has not been read from the stream", 
       -1, in.read());
   }
-
-  @Test
+  
   public void testWideTuple() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -251,8 +241,7 @@ public class TestJoinTupleWritable {
       }
     }
   }
-
-  @Test
+  
   public void testWideTuple2() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -277,7 +266,6 @@ public class TestJoinTupleWritable {
   /**
    * Tests that we can write more than 64 values.
    */
-  @Test
   public void testWideTupleBoundary() throws Exception {
     Text emptyText = new Text("Should not be set written");
     Writable[] values = new Writable[65];

+ 4 - 13
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java

@@ -17,32 +17,23 @@
  */
 package org.apache.hadoop.mapreduce.lib.join;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
 
-public class TestWrappedRRClassloader {
+public class TestWrappedRRClassloader extends TestCase {
   /**
    * Tests the class loader set by 
    * {@link Configuration#setClassLoader(ClassLoader)}
    * is inherited by any {@link WrappedRecordReader}s created by
    * {@link CompositeRecordReader}
    */
-  @Test
   public void testClassLoader() throws Exception {
     Configuration conf = new Configuration();
     Fake_ClassLoader classLoader = new Fake_ClassLoader();

+ 11 - 24
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java

@@ -18,17 +18,12 @@
 
 package org.apache.hadoop.mapreduce.lib.output;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.Random;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.DoubleWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -43,22 +38,16 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
 
-import java.io.IOException;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import junit.framework.TestCase;
+import org.apache.commons.logging.*;
 
-public class TestMRSequenceFileAsBinaryOutputFormat {
+public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
   private static final Log LOG =
     LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName());
 
   private static final int RECORDS = 10000;
-
-  @Test
+  
   public void testBinary() throws IOException, InterruptedException {
     Configuration conf = new Configuration();
     Job job = Job.getInstance(conf);
@@ -155,8 +144,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat {
     assertEquals("Some records not found", RECORDS, count);
   }
 
-  @Test
-  public void testSequenceOutputClassDefaultsToMapRedOutputClass()
+  public void testSequenceOutputClassDefaultsToMapRedOutputClass() 
          throws IOException {
     Job job = Job.getInstance();
     // Setting Random class to test getSequenceFileOutput{Key,Value}Class
@@ -184,8 +172,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat {
       SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job));
   }
 
-  @Test
-  public void testcheckOutputSpecsForbidRecordCompression()
+  public void testcheckOutputSpecsForbidRecordCompression() 
       throws IOException {
     Job job = Job.getInstance();
     FileSystem fs = FileSystem.getLocal(job.getConfiguration());

+ 5 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java

@@ -22,14 +22,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BinaryComparable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
 
-public class TestBinaryPartitioner {
+public class TestBinaryPartitioner extends TestCase {
 
-  @Test
   public void testDefaultOffsets() {
     Configuration conf = new Configuration();
     BinaryPartitioner<?> partitioner = 
@@ -53,8 +50,7 @@ public class TestBinaryPartitioner {
     partition2 = partitioner.getPartition(key2, null, 10);
     assertTrue(partition1 != partition2);
   }
-
-  @Test
+  
   public void testCustomOffsets() {
     Configuration conf = new Configuration();
     BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 }); 
@@ -79,8 +75,7 @@ public class TestBinaryPartitioner {
     partition2 = partitioner.getPartition(key2, null, 10);
     assertEquals(partition1, partition2);
   }
-
-  @Test
+  
   public void testLowerBound() {
     Configuration conf = new Configuration();
     BinaryPartitioner.setLeftOffset(conf, 0);
@@ -92,8 +87,7 @@ public class TestBinaryPartitioner {
     int partition2 = partitioner.getPartition(key2, null, 10);
     assertTrue(partition1 != partition2);
   }
-
-  @Test
+  
   public void testUpperBound() {
     Configuration conf = new Configuration();
     BinaryPartitioner.setRightOffset(conf, 4);

+ 2 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java

@@ -19,17 +19,14 @@ package org.apache.hadoop.mapreduce.lib.partition;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
 
-public class TestKeyFieldHelper {
+public class TestKeyFieldHelper extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class);
   /**
    * Test is key-field-helper's parse option.
    */
-  @Test
   public void testparseOption() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");
@@ -215,7 +212,6 @@ public class TestKeyFieldHelper {
   /**
    * Test is key-field-helper's getWordLengths.
    */
-  @Test
   public void testGetWordLengths() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");
@@ -274,7 +270,6 @@ public class TestKeyFieldHelper {
   /**
    * Test is key-field-helper's getStartOffset/getEndOffset.
    */
-  @Test
   public void testgetStartEndOffset() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");

+ 2 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java

@@ -19,16 +19,14 @@ package org.apache.hadoop.mapreduce.lib.partition;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import junit.framework.TestCase;
 
-public class TestMRKeyFieldBasedPartitioner {
+public class TestMRKeyFieldBasedPartitioner extends TestCase {
 
   /**
    * Test is key-field-based partitioned works with empty key.
    */
-  @Test
   public void testEmptyKey() throws Exception {
     int numReducers = 10;
     KeyFieldBasedPartitioner<Text, Text> kfbp = 

+ 3 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java

@@ -23,6 +23,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
@@ -39,11 +41,8 @@ import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 import org.apache.hadoop.io.serializer.Serialization;
 import org.apache.hadoop.io.serializer.WritableSerialization;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
 
-public class TestTotalOrderPartitioner {
+public class TestTotalOrderPartitioner extends TestCase {
 
   private static final Text[] splitStrings = new Text[] {
     // -inf            // 0
@@ -141,7 +140,6 @@ public class TestTotalOrderPartitioner {
     return p;
   }
 
-  @Test
   public void testTotalOrderWithCustomSerialization() throws Exception {
     TotalOrderPartitioner<String, NullWritable> partitioner =
         new TotalOrderPartitioner<String, NullWritable>();
@@ -167,7 +165,6 @@ public class TestTotalOrderPartitioner {
     }
   }
 
-  @Test
   public void testTotalOrderMemCmp() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
@@ -187,7 +184,6 @@ public class TestTotalOrderPartitioner {
     }
   }
 
-  @Test
   public void testTotalOrderBinarySearch() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
@@ -220,7 +216,6 @@ public class TestTotalOrderPartitioner {
     }
   }
 
-  @Test
   public void testTotalOrderCustomComparator() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();

+ 5 - 10
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java

@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.util;
 import java.io.File;
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -28,27 +30,20 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
-import org.junit.Before;
 import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 /**
  * A test for MRAsyncDiskService.
  */
-public class TestMRAsyncDiskService {
+public class TestMRAsyncDiskService extends TestCase {
 
   public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class);
   
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
   
-  @Before
-  public void setUp() {
+  @Override
+  protected void setUp() {
     FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
   }
 

+ 13 - 17
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.mapreduce.v2;
 
+import junit.framework.TestCase;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -28,25 +29,22 @@ import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
 
+import java.net.InetAddress;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.net.InetAddress;
 import java.security.PrivilegedExceptionAction;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-public class TestMiniMRProxyUser {
+public class TestMiniMRProxyUser extends TestCase {
 
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
-
-  @Before
-  public void setUp() throws Exception {
+    
+  protected void setUp() throws Exception {
+    super.setUp();
     if (System.getProperty("hadoop.log.dir") == null) {
       System.setProperty("hadoop.log.dir", "/tmp");
     }
@@ -93,14 +91,15 @@ public class TestMiniMRProxyUser {
     return mrCluster.createJobConf();
   }
   
-  @After
-  public void tearDown() throws Exception {
+  @Override
+  protected void tearDown() throws Exception {
     if (mrCluster != null) {
       mrCluster.shutdown();
     }
     if (dfsCluster != null) {
       dfsCluster.shutdown();
     }
+    super.tearDown();
   }
 
   private void mrRun() throws Exception {
@@ -126,13 +125,11 @@ public class TestMiniMRProxyUser {
     assertTrue(runJob.isComplete());
     assertTrue(runJob.isSuccessful());
   }
-
-  @Test
+    
   public void __testCurrentUser() throws Exception {
    mrRun();
   }  
 
-  @Test
   public void testValidProxyUser() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", UserGroupInformation.getLoginUser());
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
@@ -145,7 +142,6 @@ public class TestMiniMRProxyUser {
     });
   }
 
-  @Test
   public void ___testInvalidProxyUser() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.createProxyUser("u2", UserGroupInformation.getLoginUser());
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
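For per-test fixtures, the revert replaces @Before/@After with overrides of TestCase's protected setUp()/tearDown(), chaining to super as the hunks above do. Note also how __testCurrentUser and ___testInvalidProxyUser stay disabled under JUnit 3: names that do not start with "test" are simply never discovered. A sketch of the override idiom under a hypothetical name (ExampleFixtureTest is illustrative only):

import junit.framework.TestCase;

public class ExampleFixtureTest extends TestCase {
  private StringBuilder fixture;

  @Override
  protected void setUp() throws Exception {
    super.setUp();                  // chain first, as TestMiniMRProxyUser does
    fixture = new StringBuilder("ready");
  }

  @Override
  protected void tearDown() throws Exception {
    fixture = null;
    super.tearDown();               // chain last, mirroring the revert
  }

  public void testFixtureInitialized() {
    assertEquals("ready", fixture.toString());
  }
}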

+ 7 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.mapreduce.v2;
 
+import junit.framework.TestCase;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -27,22 +28,17 @@ import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.net.InetAddress;
 
-import static org.junit.Assert.assertNull;
-
-public class TestNonExistentJob {
+public class TestNonExistentJob extends TestCase {
 
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
 
-  @Before
-  public void setUp() throws Exception {
+  protected void setUp() throws Exception {
+    super.setUp();
     if (System.getProperty("hadoop.log.dir") == null) {
       System.setProperty("hadoop.log.dir", "/tmp");
     }
@@ -82,17 +78,17 @@ public class TestNonExistentJob {
     return mrCluster.createJobConf();
   }
 
-  @After
-  public void tearDown() throws Exception {
+  @Override
+  protected void tearDown() throws Exception {
     if (mrCluster != null) {
       mrCluster.shutdown();
     }
     if (dfsCluster != null) {
       dfsCluster.shutdown();
     }
+    super.tearDown();
   }
 
-  @Test
   public void testGetInvalidJob() throws Exception {
     RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0"));
     assertNull(runJob);

+ 1 - 8
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java

@@ -42,11 +42,6 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 public class TestStreamingBadRecords extends ClusterMapReduceTestCase
 {
@@ -73,8 +68,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
     utilTest.redirectIfAntJunit();
   }
 
-  @Before
-  public void setUp() throws Exception {
+  protected void setUp() throws Exception {
     Properties props = new Properties();
     props.setProperty(JTConfig.JT_RETIREJOBS, "false");
     props.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "false");
@@ -248,7 +242,6 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
   }
   */
 
-  @Test
   public void testNoOp() {
     // Added to avoid warnings when running this disabled test
   }
   }