
HDFS-6039. Uploading a File under a Dir with default acls throws "Duplicated ACLFeature". Contributed by Chris Nauroth.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1573879 13f79535-47bb-0310-9956-ffa450edef68
Chris Nauroth, 11 years ago
parent
commit 2cfe554980

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -666,6 +666,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-6028. Print clearer error message when user attempts to delete required
     mask entry from ACL. (cnauroth)
 
+    HDFS-6039. Uploading a File under a Dir with default acls throws "Duplicated
+    ACLFeature". (cnauroth)
+
 Release 2.3.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
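
The scenario behind this entry: an upload such as "hdfs dfs -copyFromLocal" first writes a temporary "._COPYING_" file inside the destination directory, where it inherits the directory's default ACL, and then renames it into place. Before this fix, the rename step applied default ACL inheritance a second time to an inode that already carried an AclFeature, which surfaced as the "Duplicated ACLFeature" error. A minimal reproduction sketch against the public FileSystem API, assuming an initialized FileSystem handle (for example from a MiniDFSCluster); the class name and paths are illustrative only:

import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

import com.google.common.collect.Lists;

public class DefaultAclUploadRepro {

  static void reproduce(FileSystem fs) throws Exception {
    // Directory with a default ACL entry, mirroring the new tests below.
    Path dir = new Path("/dir1");
    fs.mkdirs(dir, FsPermission.createImmutable((short) 0750));
    List<AclEntry> aclSpec = Lists.newArrayList(
        new AclEntry.Builder()
            .setScope(AclEntryScope.DEFAULT)
            .setType(AclEntryType.USER)
            .setName("charlie")
            .setPermission(FsAction.ALL)
            .build());
    fs.setAcl(dir, aclSpec);

    // Mimic an upload: create the temporary file under the directory
    // (it inherits the default ACL here), then rename it into place.
    Path tmp = new Path(dir, "data15bytes._COPYING_");
    fs.create(tmp).close();
    fs.rename(tmp, new Path(dir, "data15bytes"));
    // Before HDFS-6039 the rename re-applied the default ACL and failed
    // with "Duplicated ACLFeature"; with this patch it succeeds.
  }
}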

+ 4 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java

@@ -2246,6 +2246,7 @@ public class FSDirectory implements Closeable {
     final Quota.Counts counts = child.computeQuotaUsage();
     updateCount(iip, pos,
         counts.get(Quota.NAMESPACE), counts.get(Quota.DISKSPACE), checkQuota);
+    boolean isRename = (child.getParent() != null);
     final INodeDirectory parent = inodes[pos-1].asDirectory();
     boolean added = false;
     try {
@@ -2260,7 +2261,9 @@ public class FSDirectory implements Closeable {
           -counts.get(Quota.NAMESPACE), -counts.get(Quota.DISKSPACE));
     } else {
       iip.setINode(pos - 1, child.getParent());
-      AclStorage.copyINodeDefaultAcl(child);
+      if (!isRename) {
+        AclStorage.copyINodeDefaultAcl(child);
+      }
       addToInodeMap(child);
     }
     return added;
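
The FSDirectory change keys off whether the child already has a parent at the moment it is added: a rename re-attaches an existing inode, which still references its previous parent, while a create adds a brand-new inode with no parent, and only the latter should pick up the destination directory's default ACL. An illustrative restatement of that decision, not the actual FSDirectory code; the class and method names are made up:

import org.apache.hadoop.hdfs.server.namenode.INode;

final class DefaultAclInheritanceCheck {
  static boolean shouldInheritDefaultAcl(INode child) {
    // A newly created inode has not been attached anywhere yet, so its
    // parent is null; an inode being re-attached by a rename still points
    // at its previous parent and must keep the ACL it already has.
    return child.getParent() == null;
  }
}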

+ 39 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSAclBaseTest.java

@@ -1063,6 +1063,45 @@ public abstract class FSAclBaseTest {
     assertAclFeature(dirPath, true);
   }
 
+  @Test
+  public void testDefaultAclRenamedFile() throws Exception {
+    Path dirPath = new Path(path, "dir");
+    FileSystem.mkdirs(fs, dirPath, FsPermission.createImmutable((short)0750));
+    List<AclEntry> aclSpec = Lists.newArrayList(
+      aclEntry(DEFAULT, USER, "foo", ALL));
+    fs.setAcl(dirPath, aclSpec);
+    Path filePath = new Path(path, "file1");
+    fs.create(filePath).close();
+    fs.setPermission(filePath, FsPermission.createImmutable((short)0640));
+    Path renamedFilePath = new Path(dirPath, "file1");
+    fs.rename(filePath, renamedFilePath);
+    AclEntry[] expected = new AclEntry[] { };
+    AclStatus s = fs.getAclStatus(renamedFilePath);
+    AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
+    assertArrayEquals(expected, returned);
+    assertPermission(renamedFilePath, (short)0640);
+    assertAclFeature(renamedFilePath, false);
+  }
+
+  @Test
+  public void testDefaultAclRenamedDir() throws Exception {
+    Path dirPath = new Path(path, "dir");
+    FileSystem.mkdirs(fs, dirPath, FsPermission.createImmutable((short)0750));
+    List<AclEntry> aclSpec = Lists.newArrayList(
+      aclEntry(DEFAULT, USER, "foo", ALL));
+    fs.setAcl(dirPath, aclSpec);
+    Path subdirPath = new Path(path, "subdir");
+    FileSystem.mkdirs(fs, subdirPath, FsPermission.createImmutable((short)0750));
+    Path renamedSubdirPath = new Path(dirPath, "subdir");
+    fs.rename(subdirPath, renamedSubdirPath);
+    AclEntry[] expected = new AclEntry[] { };
+    AclStatus s = fs.getAclStatus(renamedSubdirPath);
+    AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
+    assertArrayEquals(expected, returned);
+    assertPermission(renamedSubdirPath, (short)0750);
+    assertAclFeature(renamedSubdirPath, false);
+  }
+
   @Test
   public void testSkipAclEnforcementPermsDisabled() throws Exception {
     Path bruceDir = new Path(path, "bruce");
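
The new tests assert that a file or directory moved into a directory carrying a default ACL ends up with no extended ACL entries, unchanged permission bits, and no AclFeature on the inode. A small client-side check along the same lines, assuming an existing FileSystem handle and an already-renamed path; the class and method names are illustrative:

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclStatus;

final class RenameAclCheck {
  /** Fails if a renamed path unexpectedly picked up extended ACL entries. */
  static void assertNoInheritedAcl(FileSystem fs, Path renamed) throws Exception {
    AclStatus status = fs.getAclStatus(renamed);
    if (!status.getEntries().isEmpty()) {
      throw new AssertionError("Unexpected ACL entries after rename: "
          + status.getEntries());
    }
  }
}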

+ 50 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testAclCLI.xml

@@ -972,5 +972,55 @@
         </comparator>
       </comparators>
     </test>
+    <test>
+      <description>copyFromLocal: copying file into a directory with a default ACL</description>
+      <test-commands>
+        <command>-fs NAMENODE -mkdir /dir1</command>
+        <command>-fs NAMENODE -setfacl -m default:user:charlie:rwx /dir1</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir1/data15bytes</command>
+        <command>-fs NAMENODE -getfacl /dir1/data15bytes</command>
+      </test-commands>
+      <cleanup-commands>
+        <command>-fs NAMENODE -rm -R /dir1</command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^# file: /dir1/data15bytes$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^# owner: USERNAME$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^# group: supergroup$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^user::rw-$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^user:charlie:rwx\t#effective:r--$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^group::r-x\t#effective:r--$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^mask::r--$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^other::r--$</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpAcrossOutputComparator</type>
+          <expected-output>.*(?!default).*</expected-output>
+        </comparator>
+      </comparators>
+    </test>
   </tests>
 </configuration>
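
In the expected getfacl output above, the inherited user:charlie:rwx entry is reported with "#effective:r--" because an extended entry's effective permission is its granted permission intersected with the mask entry, and the mask of the copied file is taken from its group permission bits (r-- for the default 644 create mode). A small sketch of that intersection using Hadoop's FsAction; the class name is illustrative:

import org.apache.hadoop.fs.permission.FsAction;

public class EffectivePermissionDemo {
  public static void main(String[] args) {
    FsAction granted = FsAction.ALL;         // user:charlie:rwx
    FsAction mask = FsAction.READ;           // mask::r--, from the file's group bits
    FsAction effective = granted.and(mask);  // r--
    System.out.println("user:charlie:" + granted.SYMBOL
        + "\t#effective:" + effective.SYMBOL);
  }
}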