
HADOOP-12943. Add -w -r options in dfs -test command. Contributed by Weiwei Yang.

(cherry picked from commit 09e82acaf9a6d7663bc51bbca0cdeca4b582b535)
Akira Ajisaka 9 years ago
parent
commit
a36aa920f4

+ 54 - 26
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

@@ -18,11 +18,14 @@
 
 package org.apache.hadoop.fs.shell;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.LinkedList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.security.AccessControlException;
 
 /**
  * Perform shell-like file tests 
@@ -38,18 +41,25 @@ class Test extends FsCommand {
   public static final String NAME = "test";
   public static final String USAGE = "-[defsz] <path>";
   public static final String DESCRIPTION =
-    "Answer various questions about <path>, with result via exit status.\n" +
-    "  -d  return 0 if <path> is a directory.\n" +
-    "  -e  return 0 if <path> exists.\n" +
-    "  -f  return 0 if <path> is a file.\n" +
-    "  -s  return 0 if file <path> is greater than zero bytes in size.\n" +
-    "  -z  return 0 if file <path> is zero bytes in size, else return 1.";
+      "Answer various questions about <path>, with result via exit status.\n"
+          + "  -d  return 0 if <path> is a directory.\n"
+          + "  -e  return 0 if <path> exists.\n"
+          + "  -f  return 0 if <path> is a file.\n"
+          + "  -s  return 0 if file <path> is greater "
+          + "        than zero bytes in size.\n"
+          + "  -w  return 0 if file <path> exists "
+          + "        and write permission is granted.\n"
+          + "  -r  return 0 if file <path> exists "
+          + "        and read permission is granted.\n"
+          + "  -z  return 0 if file <path> is "
+          + "        zero bytes in size, else return 1.";
 
   private char flag;
   
   @Override
   protected void processOptions(LinkedList<String> args) {
-    CommandFormat cf = new CommandFormat(1, 1, "e", "d", "f", "s", "z");
+    CommandFormat cf = new CommandFormat(1, 1,
+        "e", "d", "f", "s", "z", "w", "r");
     cf.parse(args);
     
     String[] opts = cf.getOpts().toArray(new String[0]);
@@ -68,29 +78,47 @@ class Test extends FsCommand {
   protected void processPath(PathData item) throws IOException {
     boolean test = false;
     switch (flag) {
-      case 'e':
-        test = true;
-        break;
-      case 'd':
-        test = item.stat.isDirectory();
-        break;
-      case 'f':
-        test = item.stat.isFile();
-        break;
-      case 's':
-        test = (item.stat.getLen() > 0);
-        break;
-      case 'z':
-        test = (item.stat.getLen() == 0);
-        break;
-      default:
-        break;
+    case 'e':
+      test = true;
+      break;
+    case 'd':
+      test = item.stat.isDirectory();
+      break;
+    case 'f':
+      test = item.stat.isFile();
+      break;
+    case 's':
+      test = (item.stat.getLen() > 0);
+      break;
+    case 'z':
+      test = (item.stat.getLen() == 0);
+      break;
+    case 'w':
+      test = testAccess(item, FsAction.WRITE);
+      break;
+    case 'r':
+      test = testAccess(item, FsAction.READ);
+      break;
+    default:
+      break;
+    }
+    if (!test) {
+      exitCode = 1;
+    }
+  }
+
+  private boolean testAccess(PathData item, FsAction action)
+      throws IOException {
+    try {
+      item.fs.access(item.path, action);
+      return true;
+    } catch (AccessControlException | FileNotFoundException e) {
+      return false;
     }
-    if (!test) exitCode = 1;
   }
 
   @Override
   protected void processNonexistentPath(PathData item) throws IOException {
     exitCode = 1;
   }
-}
+}
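For context, the new -w/-r flags reduce to a single FileSystem#access call whose failure modes (missing permission, missing path) are mapped to a false result, exactly as in the testAccess helper above. A minimal standalone sketch of that pattern, assuming a configured default FileSystem and using a placeholder path not taken from this patch:

```java
import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.security.AccessControlException;

public class AccessCheckSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path("/tmp/example"); // placeholder path, not from the patch

    // Same shape as Test#testAccess: access() throws when the requested
    // permission is not granted or the path does not exist.
    boolean readable;
    try {
      fs.access(path, FsAction.READ);
      readable = true;
    } catch (AccessControlException | FileNotFoundException e) {
      readable = false;
    }
    System.out.println(path + " readable: " + readable);
  }
}
```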

+ 3 - 0
hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md

@@ -669,8 +669,11 @@ Options:
 * -e: if the path exists, return 0.
 * -f: if the path is a file, return 0.
 * -s: if the path is not empty, return 0.
+* -r: if the path exists and read permission is granted, return 0.
+* -w: if the path exists and write permission is granted, return 0.
 * -z: if the file is zero length, return 0.
 
+
 Example:
 
 * `hadoop fs -test -e filename`
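The same exit-status contract can be exercised programmatically through FsShell, which is how the new test cases below drive the -r/-w checks. A minimal sketch, assuming a configured cluster and a placeholder path:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;

public class TestFlagSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FsShell shell = new FsShell(conf);
    // Returns 0 if /tmp/example exists and read permission is granted, 1 otherwise.
    int rc = shell.run(new String[] {"-test", "-r", "/tmp/example"});
    System.out.println("exit code: " + rc);
  }
}
```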

+ 69 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java

@@ -1126,8 +1126,8 @@ public class TestDFSShell {
    * Tests various options of DFSShell.
    */
   @Test (timeout = 120000)
-  public void testDFSShell() throws IOException {
-    Configuration conf = new HdfsConfiguration();
+  public void testDFSShell() throws Exception {
+    final Configuration conf = new HdfsConfiguration();
     /* This tests some properties of ChecksumFileSystem as well.
      * Make sure that we create ChecksumDFS */
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -1479,6 +1479,73 @@ public class TestDFSShell {
         assertEquals(0, val);
       }
 
+      // Verify -test -w/-r
+      {
+        Path permDir = new Path("/test/permDir");
+        Path permFile = new Path("/test/permDir/permFile");
+        mkdir(fs, permDir);
+        writeFile(fs, permFile);
+
+        // Verify -test -w positive case (dir exists and can write)
+        final String[] wargs = new String[3];
+        wargs[0] = "-test";
+        wargs[1] = "-w";
+        wargs[2] = permDir.toString();
+        int val = -1;
+        try {
+          val = shell.run(wargs);
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+              e.getLocalizedMessage());
+        }
+        assertEquals(0, val);
+
+        // Verify -test -r positive case (file exists and can read)
+        final String[] rargs = new String[3];
+        rargs[0] = "-test";
+        rargs[1] = "-r";
+        rargs[2] = permFile.toString();
+        try {
+          val = shell.run(rargs);
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+              e.getLocalizedMessage());
+        }
+        assertEquals(0, val);
+
+        // Verify -test -r negative case (file exists but cannot read)
+        runCmd(shell, "-chmod", "600", permFile.toString());
+
+        UserGroupInformation smokeUser =
+            UserGroupInformation.createUserForTesting("smokeUser",
+                new String[] {"hadoop"});
+        smokeUser.doAs(new PrivilegedExceptionAction<String>() {
+            @Override
+            public String run() throws Exception {
+              FsShell shell = new FsShell(conf);
+              int exitCode = shell.run(rargs);
+              assertEquals(1, exitCode);
+              return null;
+            }
+          });
+
+        // Verify -test -w negative case (dir exists but cannot write)
+        runCmd(shell, "-chown", "-R", "not_allowed", permDir.toString());
+        runCmd(shell, "-chmod", "-R", "700", permDir.toString());
+
+        smokeUser.doAs(new PrivilegedExceptionAction<String>() {
+          @Override
+          public String run() throws Exception {
+            FsShell shell = new FsShell(conf);
+            int exitCode = shell.run(wargs);
+            assertEquals(1, exitCode);
+            return null;
+          }
+        });
+
+        // cleanup
+        fs.delete(permDir, true);
+      }
     } finally {
       try {
         fileSys.close();