
svn merge -c 1369197 FIXES: MAPREDUCE-4503. Should throw InvalidJobConfException if duplicates found in cacheArchives or cacheFiles (Robert Evans via jeagles)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1369209 13f79535-47bb-0310-9956-ffa450edef68
Jonathan Turner Eagles, 12 years ago
parent commit a38fa98287
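
In practical terms, the patch makes job submission fail fast when two distributed-cache entries localize under the same link name but point at different resources. A minimal sketch of a configuration that is now rejected (hypothetical HDFS URIs; the actual check runs inside MRApps.setupDistributedCache, shown in the second diff below):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.filecache.DistributedCache;

public class DuplicateCacheEntries {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Both URIs use the fragment "something" as the localized link name,
    // but they refer to different resources.
    DistributedCache.addCacheArchive(
        new URI("hdfs://nn:8020/tmp/something.zip#something"), conf);
    DistributedCache.addCacheFile(
        new URI("hdfs://nn:8020/tmp/something.txt#something"), conf);
    // With this change, resolving the cache entries at submission time
    // (MRApps.setupDistributedCache) throws InvalidJobConfException instead
    // of letting the second entry silently clobber the first.
  }
}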

+ 3 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -382,6 +382,9 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-4504. SortValidator writes to wrong directory (Robert Evans 
     via tgraves)
 
+    MAPREDUCE-4503. Should throw InvalidJobConfException if duplicates found in
+    cacheArchives or cacheFiles (Robert Evans via jeagles)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 16 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java

@@ -35,6 +35,7 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -56,6 +57,7 @@ import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Helper class for MR applications
@@ -263,6 +265,13 @@ public class MRApps extends Apps {
         DistributedCache.getFileClassPaths(conf));
   }
 
+  private static String getResourceDescription(LocalResourceType type) {
+    if(type == LocalResourceType.ARCHIVE) {
+      return "cache archive (" + MRJobConfig.CACHE_ARCHIVES + ") ";
+    }
+    return "cache file (" + MRJobConfig.CACHE_FILES + ") ";
+  }
+  
   // TODO - Move this to MR!
   // Use TaskDistributedCacheManager.CacheFiles.makeCacheFiles(URI[], 
   // long[], boolean[], Path[], FileType)
@@ -308,6 +317,13 @@ public class MRApps extends Apps {
           throw new IllegalArgumentException("Resource name must be relative");
         }
         String linkName = name.toUri().getPath();
+        LocalResource orig = localResources.get(linkName);
+        if(orig != null && !orig.getResource().equals(
+            ConverterUtils.getYarnUrlFromURI(p.toUri()))) {
+          throw new InvalidJobConfException(
+              getResourceDescription(orig.getType()) + orig.getResource() + 
+              " conflicts with " + getResourceDescription(type) + u);
+        }
         localResources.put(
             linkName,
             BuilderUtils.newLocalResource(

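The core of the change is the lookup-before-put above: local resources are keyed by link name (the path portion of the fragment-resolved name), and a second entry for an existing link name is tolerated only if it resolves to the same YARN URL; otherwise InvalidJobConfException is raised, naming both conflicting entries. A standalone sketch of that logic under simplified types (plain strings instead of LocalResource/URL, and IllegalStateException standing in for InvalidJobConfException):

import java.util.HashMap;
import java.util.Map;

public class LinkNameConflictSketch {
  private final Map<String, String> localResources =
      new HashMap<String, String>();

  void register(String linkName, String url) {
    String orig = localResources.get(linkName);
    // Re-registering the same URL under the same link name is harmless;
    // a different URL under an existing link name is a conflict.
    if (orig != null && !orig.equals(url)) {
      throw new IllegalStateException(
          orig + " conflicts with " + url + " at link name " + linkName);
    }
    localResources.put(linkName, url);
  }

  public static void main(String[] args) {
    LinkNameConflictSketch s = new LinkNameConflictSketch();
    s.register("something", "mockfs://mock/tmp/something.zip");
    s.register("something", "mockfs://mock/tmp/something.zip"); // same URL: allowed
    s.register("something", "mockfs://mock/tmp/something.txt"); // different URL: throws
  }
}
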
+ 127 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java

@@ -19,24 +19,32 @@
 package org.apache.hadoop.mapreduce.v2.util;
 
 import java.io.IOException;
+import java.net.URI;
 import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.api.records.LocalResourceType;
 
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 public class TestMRApps {
 
@@ -168,5 +176,122 @@ public class TestMRApps {
     assertNotSame("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!",
       env_str.indexOf("$PWD:job.jar"), 0);
   }
-
+  
+  @Test
+  public void testSetupDistributedCacheEmpty() throws IOException {
+    Configuration conf = new Configuration();
+    Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+    assertTrue("Empty Config did not produce an empty list of resources",
+        localResources.isEmpty());
+  }
+  
+  @SuppressWarnings("deprecation")
+  @Test(expected = InvalidJobConfException.class)
+  public void testSetupDistributedCacheConflicts() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+    
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+    
+    URI archive = new URI("mockfs://mock/tmp/something.zip#something");
+    Path archivePath = new Path(archive);
+    URI file = new URI("mockfs://mock/tmp/something.txt#something");
+    Path filePath = new Path(file);
+    
+    when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+    
+    DistributedCache.addCacheArchive(archive, conf);
+    conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_VISIBILITIES, "true");
+    DistributedCache.addCacheFile(file, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
+    Map<String, LocalResource> localResources = 
+      new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+  }
+  
+  @SuppressWarnings("deprecation")
+  @Test(expected = InvalidJobConfException.class)
+  public void testSetupDistributedCacheConflictsFiles() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+    
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+    
+    URI file = new URI("mockfs://mock/tmp/something.zip#something");
+    Path filePath = new Path(file);
+    URI file2 = new URI("mockfs://mock/tmp/something.txt#something");
+    Path file2Path = new Path(file2);
+    
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+    when(mockFs.resolvePath(file2Path)).thenReturn(file2Path);
+    
+    DistributedCache.addCacheFile(file, conf);
+    DistributedCache.addCacheFile(file2, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "10,11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "10,11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true,true");
+    Map<String, LocalResource> localResources = 
+      new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+  }
+  
+  @SuppressWarnings("deprecation")
+  @Test
+  public void testSetupDistributedCache() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+    
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+    
+    URI archive = new URI("mockfs://mock/tmp/something.zip");
+    Path archivePath = new Path(archive);
+    URI file = new URI("mockfs://mock/tmp/something.txt#something");
+    Path filePath = new Path(file);
+    
+    when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+    
+    DistributedCache.addCacheArchive(archive, conf);
+    conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_VISIBILITIES, "true");
+    DistributedCache.addCacheFile(file, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
+    Map<String, LocalResource> localResources = 
+      new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+    assertEquals(2, localResources.size());
+    LocalResource lr = localResources.get("something.zip");
+    assertNotNull(lr);
+    assertEquals(10L, lr.getSize());
+    assertEquals(10L, lr.getTimestamp());
+    assertEquals(LocalResourceType.ARCHIVE, lr.getType());
+    lr = localResources.get("something");
+    assertNotNull(lr);
+    assertEquals(11L, lr.getSize());
+    assertEquals(11L, lr.getTimestamp());
+    assertEquals(LocalResourceType.FILE, lr.getType());
+  }
+  
+  static class MockFileSystem extends FilterFileSystem {
+    MockFileSystem() {
+      super(mock(FileSystem.class));
+    }
+    public void initialize(URI name, Configuration conf) throws IOException {}
+  }
+  
 }
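
A note on the test scaffolding above: MockFileSystem extends FilterFileSystem around a Mockito mock so that resolvePath can be stubbed per path, and its no-op initialize keeps FileSystem.get from trying to initialize the underlying mock. Registering the class under fs.mockfs.impl is what lets the mockfs:// URIs in these tests resolve to it.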