
Backport of HADOOP-7818 from trunk. DiskChecker#checkDir should fail if the directory is not executable. Contributed by Madhukara Phatak. (harsh)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1361718 13f79535-47bb-0310-9956-ffa450edef68
Harsh J, 12 years ago
parent
commit
a016d8e9a9
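
For context, a minimal sketch of how the new check surfaces to a caller of DiskChecker#checkDir. Only the DiskChecker and DiskErrorException names are taken from the patch below; the path, class name and cleanup steps are illustrative assumptions.

import java.io.File;

import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

public class CheckDirExample {
  public static void main(String[] args) {
    // Illustrative local path; any writable location works.
    File dir = new File("/tmp/diskchecker-example");
    dir.mkdirs();
    // Clear the execute (traverse) bit for everyone, as "chmod a-x" would.
    dir.setExecutable(false, false);
    try {
      DiskChecker.checkDir(dir);
      System.out.println("checkDir passed");
    } catch (DiskErrorException e) {
      // With this patch applied, the non-executable directory is rejected here.
      System.out.println("checkDir failed: " + e.getMessage());
    } finally {
      dir.setExecutable(true, false);
      dir.delete();
    }
  }
}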

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -366,6 +366,9 @@ Release 2.0.0-alpha - 05-23-2012
     HADOOP-8422. Deprecate FileSystem#getDefault* and getServerDefault
     methods that don't take a Path argument. (eli)
 
+    HADOOP-7818. DiskChecker#checkDir should fail if the directory is
+    not executable. (Madhukara Phatak via harsh)
+
   BUG FIXES
 
     HADOOP-8199. Fix issues in start-all.sh and stop-all.sh (Devaraj K via umamahesh)

+ 14 - 8
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java

@@ -78,26 +78,32 @@ public class DiskChecker {
   }
   
   /**
-   * Create the directory if it doesn't exist and
+   * Create the directory if it doesn't exist and check that the directory is
+   * readable, writable and executable.
+   *
    * @param dir
    * @throws DiskErrorException
    */
   public static void checkDir(File dir) throws DiskErrorException {
     if (!mkdirsWithExistsCheck(dir))
-      throw new DiskErrorException("can not create directory: " 
+      throw new DiskErrorException("Can not create directory: "
                                    + dir.toString());
-        
+
     if (!dir.isDirectory())
-      throw new DiskErrorException("not a directory: " 
+      throw new DiskErrorException("Not a directory: "
                                    + dir.toString());
-            
+
     if (!dir.canRead())
-      throw new DiskErrorException("directory is not readable: " 
+      throw new DiskErrorException("Directory is not readable: "
                                    + dir.toString());
-            
+
     if (!dir.canWrite())
-      throw new DiskErrorException("directory is not writable: " 
+      throw new DiskErrorException("Directory is not writable: "
                                    + dir.toString());
+
+    if (!dir.canExecute())
+      throw new DiskErrorException("Directory is not executable: "
+                                   + dir.toString());
   }
 
   /**

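A brief illustration (not part of the patch) of why the new canExecute() check matters for directories: the execute bit controls traversal, so a directory that still passes canRead() and canWrite() may hold entries that cannot be opened. The path below is an assumption made for the sketch.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class TraverseDemo {
  public static void main(String[] args) throws IOException {
    File dir = new File("/tmp/traverse-demo");   // illustrative path
    dir.mkdirs();
    File child = new File(dir, "data");
    child.createNewFile();

    // Drop the execute (traverse) bit but keep read/write, roughly chmod 666.
    dir.setExecutable(false, false);

    System.out.println(dir.canRead());     // true: entry names can still be listed
    System.out.println(dir.canWrite());    // true
    System.out.println(dir.canExecute());  // false: checkDir now rejects the dir

    try {
      new FileInputStream(child).close();  // fails: the entry cannot be opened
    } catch (IOException e) {
      System.out.println("cannot open child: " + e.getMessage());
    }

    dir.setExecutable(true, false);        // restore the bit so cleanup works
    child.delete();
    dir.delete();
  }
}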
+ 49 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java

@@ -124,4 +124,53 @@ public class TestDiskChecker {
     }
     System.out.println("checkDir success: "+ success);
   }
+
+  /**
+   * These test cases verify the creation of a local folder with the correct
+   * permissions (e.g. for intermediate mapper output).
+   */
+
+  @Test
+  public void testCheckDir_normal_local() throws Throwable {
+    _checkDirs(true, "755", true);
+  }
+
+  @Test
+  public void testCheckDir_notDir_local() throws Throwable {
+    _checkDirs(false, "000", false);
+  }
+
+  @Test
+  public void testCheckDir_notReadable_local() throws Throwable {
+    _checkDirs(true, "000", false);
+  }
+
+  @Test
+  public void testCheckDir_notWritable_local() throws Throwable {
+    _checkDirs(true, "444", false);
+  }
+
+  @Test
+  public void testCheckDir_notListable_local() throws Throwable {
+    _checkDirs(true, "666", false);
+  }
+
+  private void _checkDirs(boolean isDir, String perm, boolean success)
+      throws Throwable {
+    File localDir = File.createTempFile("test", "tmp");
+    localDir.delete();
+    localDir.mkdir();
+    Runtime.getRuntime().exec(
+        "chmod " + perm + "  " + localDir.getAbsolutePath()).waitFor();
+    try {
+      DiskChecker.checkDir(localDir);
+      assertTrue("checkDir success", success);
+    } catch (DiskErrorException e) {
+      e.printStackTrace();
+      assertFalse("checkDir success", success);
+    }
+    localDir.delete();
+    System.out.println("checkDir success: " + success);
+
+  }
 }
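
As an aside, the shell-out to chmod makes these tests Unix-specific. A portable variant of the same permission setup could, in principle, use the java.io.File permission setters instead; the helper below is a hypothetical sketch, not something the patch adds.

import java.io.File;
import java.io.IOException;

public class PortablePermSetup {
  // Roughly the setup of _checkDirs(true, "444", false) without invoking chmod;
  // java.io.File setters cover only the basic rwx bits, not full POSIX modes.
  static File makeReadOnlyDir() throws IOException {
    File localDir = File.createTempFile("test", "tmp");
    localDir.delete();
    localDir.mkdir();
    localDir.setReadable(true, false);     // set r for owner, group and other
    localDir.setWritable(false, false);    // clear w everywhere
    localDir.setExecutable(false, false);  // clear x everywhere
    return localDir;
  }
}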