
MAPREDUCE-6682. TestMRCJCFileOutputCommitter fails intermittently. Contributed by Akira Ajisaka.
(cherry picked from commit 8f1c374bec2451568f463ab68f7eb6db734ab14e)

Conflicts:

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java

Jason Lowe
commit 8ffe5eb7bd

+ 9 - 5
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java

@@ -25,6 +25,8 @@ import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.After;
 import org.junit.Test;

 import java.io.File;
@@ -37,8 +39,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;

 public class TestMRCJCFileOutputCommitter {
-  private static Path outDir = new Path(
-     System.getProperty("test.build.data", "/tmp"), "output");
+  private static Path outDir = new Path(GenericTestUtils.getTempPath("output"));

   // A random task attempt id for testing.
   private static String attempt = "attempt_200707121733_0001_m_000000_0";
@@ -112,12 +113,11 @@ public class TestMRCJCFileOutputCommitter {
     expectedOutput.append(key2).append('\t').append(val2).append("\n");
     String output = UtilsForTests.slurp(expectedFile);
     assertEquals(output, expectedOutput.toString());
-
-    FileUtil.fullyDelete(new File(outDir.toString()));
   }

   @Test
   public void testAbort() throws IOException {
+    FileUtil.fullyDelete(new File(outDir.toString()));
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
     JobContext jContext = new JobContextImpl(job, taskID.getJobID());
@@ -152,7 +152,6 @@ public class TestMRCJCFileOutputCommitter {
     assertFalse("job temp dir "+expectedFile+" still exists", expectedFile.exists());
     assertEquals("Output directory not empty", 0, new File(outDir.toString())
         .listFiles().length);
-    FileUtil.fullyDelete(new File(outDir.toString()));
   }

   public static class FakeFileSystem extends RawLocalFileSystem {
@@ -223,4 +222,9 @@ public class TestMRCJCFileOutputCommitter {
     assertTrue(th.getMessage().contains("fake delete failed"));
     assertTrue("job temp dir does not exists", jobTmpDir.exists());
   }
+
+  @After
+  public void teardown() {
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
 }
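
The change above boils down to one pattern: the output directory moves from the shared test.build.data location to a per-test path from GenericTestUtils.getTempPath, and the cleanup that used to sit at the end of individual tests moves into a single @After method so it runs even when an assertion fails partway through. A minimal sketch of the resulting shape (test bodies elided; the real class keeps its existing test methods):

import java.io.File;

import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.After;
import org.junit.Test;

public class TestMRCJCFileOutputCommitter {
  // Per-test temp location instead of the shared test.build.data directory.
  private static Path outDir = new Path(GenericTestUtils.getTempPath("output"));

  @Test
  public void testCommitter() throws Exception {
    // ... run the committer and assert on the files written under outDir ...
  }

  @After
  public void teardown() {
    // JUnit runs this after every test, passing or failing, so stale output
    // from one test can no longer leak into the next run.
    FileUtil.fullyDelete(new File(outDir.toString()));
  }
}

Relying on @After rather than a delete at the end of each test body is what removes the intermittent failures: the old in-test delete was skipped whenever an earlier assertion threw.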

+ 23 - 11
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java

@@ -19,11 +19,13 @@
 package org.apache.hadoop.mapred;

 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.jobhistory.EventType;
 import org.apache.hadoop.mapreduce.jobhistory.TestJobHistoryEventHandler;
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -145,21 +147,21 @@ public class TestMRTimelineEventHandling {
     conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
     conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
     MiniMRYarnCluster cluster = null;
+    FileSystem fs = null;
+    Path inDir = new Path(GenericTestUtils.getTempPath("input"));
+    Path outDir = new Path(GenericTestUtils.getTempPath("output"));
     try {
+      fs = FileSystem.get(conf);
       cluster = new MiniMRYarnCluster(
           TestJobHistoryEventHandler.class.getSimpleName(), 1);
       cluster.init(conf);
       cluster.start();
       conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
           MiniYARNCluster.getHostname() + ":"
-          + cluster.getApplicationHistoryServer().getPort());
+              + cluster.getApplicationHistoryServer().getPort());
       TimelineStore ts = cluster.getApplicationHistoryServer()
           .getTimelineStore();

-      String localPathRoot = System.getProperty("test.build.data",
-          "build/test/data");
-      Path inDir = new Path(localPathRoot, "input");
-      Path outDir = new Path(localPathRoot, "output");
       RunningJob job =
           UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
       Assert.assertEquals(JobStatus.SUCCEEDED,
@@ -181,6 +183,7 @@ public class TestMRTimelineEventHandling {
       if (cluster != null) {
         cluster.stop();
       }
+      deletePaths(fs, inDir, outDir);
     }

     conf = new YarnConfiguration();
@@ -194,15 +197,10 @@ public class TestMRTimelineEventHandling {
       cluster.start();
       conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
           MiniYARNCluster.getHostname() + ":"
-          + cluster.getApplicationHistoryServer().getPort());
+              + cluster.getApplicationHistoryServer().getPort());
       TimelineStore ts = cluster.getApplicationHistoryServer()
           .getTimelineStore();

-      String localPathRoot = System.getProperty("test.build.data",
-          "build/test/data");
-      Path inDir = new Path(localPathRoot, "input");
-      Path outDir = new Path(localPathRoot, "output");
-
       conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
       RunningJob job =
           UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
@@ -225,6 +223,20 @@ public class TestMRTimelineEventHandling {
       if (cluster != null) {
         cluster.stop();
       }
+      deletePaths(fs, inDir, outDir);
+    }
+  }
+
+  /** Delete input paths recursively. Paths should not be null. */
+  private void deletePaths(FileSystem fs, Path... paths) {
+    if (fs == null) {
+      return;
+    }
+    for (Path path : paths) {
+      try {
+        fs.delete(path, true);
+      } catch (Exception ignored) {
+      }
     }
   }
 }
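
TestMRTimelineEventHandling gets the same treatment: the job input and output paths now come from GenericTestUtils.getTempPath rather than the shared test.build.data root, and the new deletePaths helper removes them in the finally block after the mini cluster is stopped. As a rough standalone sketch of that helper (the wrapper class name is only for illustration; the real helper is the private method added above):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PathCleanupSketch {
  /** Recursively delete the given paths, ignoring failures. */
  static void deletePaths(FileSystem fs, Path... paths) {
    if (fs == null) {
      // The FileSystem may never have been obtained if setup failed early.
      return;
    }
    for (Path path : paths) {
      try {
        // The second argument asks for recursive deletion of the directory tree.
        fs.delete(path, true);
      } catch (Exception ignored) {
        // Cleanup problems are swallowed so they cannot mask a real test failure.
      }
    }
  }
}

Swallowing exceptions here is a deliberate choice: the helper runs in a finally block, and a secondary IOException from cleanup would otherwise hide the assertion error that actually caused the test to fail.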