
HADOOP-18778. Fixes failing tests when CSE is enabled. (#5763)

Contributed By: Ahmar Suhail <ahmarsu@amazon.co.uk>
ahmarsuhail committed 1 year ago
commit 377cd2bd7a
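
With client-side encryption (CSE) enabled, the AWS encryption client changes what is stored in S3: object lengths include encryption overhead, ranged GETs return undecryptable ciphertext, and encryption metadata may land in companion objects. This patch therefore renames the KMS role-policy statement (KMS access is needed for CSE-KMS as well as SSE-KMS), widens the test role policies accordingly, and skips the tests that cannot work under CSE.

A minimal sketch of the skip guard those tests call, assuming JUnit 4's Assume and the standard fs.s3a.encryption.algorithm key; the real helper lives in the hadoop-aws test utilities and may differ:

    import org.apache.hadoop.conf.Configuration;
    import static org.junit.Assume.assumeTrue;

    /**
     * Skip the calling test when S3 client-side encryption is configured:
     * CSE alters stored object lengths and makes ranged GETs return raw
     * ciphertext, which breaks tests that read parts of an object directly.
     */
    public static void skipIfClientSideEncryption(Configuration conf) {
      String algorithm = conf.getTrimmed("fs.s3a.encryption.algorithm", "");
      assumeTrue("Skipping: client-side encryption is enabled",
          !algorithm.startsWith("CSE"));
    }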

+ 2 - 2
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java

@@ -213,7 +213,7 @@ import static org.apache.hadoop.fs.s3a.Listing.toLocatedFileStatusIterator;
 import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.Statistic.*;
 import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.INITIALIZE_SPAN;
-import static org.apache.hadoop.fs.s3a.auth.RolePolicies.STATEMENT_ALLOW_SSE_KMS_RW;
+import static org.apache.hadoop.fs.s3a.auth.RolePolicies.STATEMENT_ALLOW_KMS_RW;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.allowS3Operations;
 import static org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens.TokenIssuingPolicy.NoTokensAvailable;
 import static org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens.hasDelegationTokenBinding;
@@ -4218,7 +4218,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
     // no attempt is made to qualify KMS access; there's no
     // way to predict read keys, and not worried about granting
     // too much encryption access.
-    statements.add(STATEMENT_ALLOW_SSE_KMS_RW);
+    statements.add(STATEMENT_ALLOW_KMS_RW);
 
     return statements;
   }
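
This method backs S3AFileSystem's AWSPolicyProvider role: delegation-token and assumed-role bindings ask the filesystem for the minimal policy it needs, and that policy now carries the generic KMS statement whether encryption is server-side or client-side. A hedged usage sketch (the listAWSPolicyRules name and AccessLevel enum come from the hadoop-aws delegation support; treat the exact signature as an assumption):

    import java.util.EnumSet;
    import java.util.List;
    import org.apache.hadoop.fs.s3a.auth.RoleModel;
    import static org.apache.hadoop.fs.s3a.auth.delegation.AWSPolicyProvider.AccessLevel;

    // Ask the filesystem which policy statements a delegation token
    // should carry; the list now always includes blanket KMS R/W,
    // covering SSE-KMS and CSE-KMS alike.
    List<RoleModel.Statement> statements =
        fs.listAWSPolicyRules(EnumSet.of(AccessLevel.READ, AccessLevel.WRITE));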

+ 1 - 1
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java

@@ -80,7 +80,7 @@ public final class RolePolicies {
   * Statement to allow KMS R/W access, so full use of
    * SSE-KMS.
    */
-  public static final Statement STATEMENT_ALLOW_SSE_KMS_RW =
+  public static final Statement STATEMENT_ALLOW_KMS_RW =
       statement(true, KMS_ALL_KEYS, KMS_ALL_OPERATIONS);
 
   /**

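The rename drops "SSE" because the statement was never specific to server-side encryption: it allows every KMS operation on every key, which CSE-KMS needs just as much. Roughly what the constant expands to, assuming KMS_ALL_KEYS and KMS_ALL_OPERATIONS are the wildcard resource and action (their exact values are not shown in this diff):

    // Hand-built equivalent: Allow all KMS actions against all key ARNs.
    Statement allowKmsRw = new Statement(Effects.Allow)
        .addActions("kms:*")             // assumed value of KMS_ALL_OPERATIONS
        .addResources("arn:aws:kms:*");  // assumed value of KMS_ALL_KEYS
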
+ 1 - 0
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingCacheFiles.java

@@ -105,6 +105,7 @@ public class ITestS3APrefetchingCacheFiles extends AbstractS3ACostTest {
   @Test
   public void testCacheFileExistence() throws Throwable {
     describe("Verify that FS cache files exist on local FS");
+    skipIfClientSideEncryption();
 
     try (FSDataInputStream in = fs.open(testFile)) {
       byte[] buffer = new byte[prefetchBlockSize];

+ 3 - 0
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingInputStream.java

@@ -106,6 +106,7 @@ public class ITestS3APrefetchingInputStream extends AbstractS3ACostTest {
   @Test
   public void testReadLargeFileFully() throws Throwable {
     describe("read a large file fully, uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();
 
@@ -139,6 +140,7 @@ public class ITestS3APrefetchingInputStream extends AbstractS3ACostTest {
   public void testReadLargeFileFullyLazySeek() throws Throwable {
     describe("read a large file using readFully(position,buffer,offset,length),"
         + " uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();
 
@@ -170,6 +172,7 @@ public class ITestS3APrefetchingInputStream extends AbstractS3ACostTest {
   @Test
   public void testRandomReadLargeFile() throws Throwable {
     describe("random read on a large file, uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();
 

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ARequesterPays.java

@@ -59,7 +59,7 @@ public class ITestS3ARequesterPays extends AbstractS3ATestBase {
   @Test
   public void testRequesterPaysOptionSuccess() throws Throwable {
     describe("Test requester pays enabled case by reading last then first byte");
-
+    skipIfClientSideEncryption();
     Configuration conf = this.createConfiguration();
     conf.setBoolean(ALLOW_REQUESTER_PAYS, true);
     // Enable bucket exists check, the first failure point people may encounter

+ 6 - 12
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java

@@ -426,8 +426,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     bindRolePolicy(conf,
         policy(
             statement(false, S3_ALL_BUCKETS, S3_GET_OBJECT_TORRENT),
-            ALLOW_S3_GET_BUCKET_LOCATION,
-            STATEMENT_ALLOW_SSE_KMS_RW));
+            ALLOW_S3_GET_BUCKET_LOCATION, STATEMENT_ALLOW_KMS_RW));
     Path path = path("testAssumeRoleStillIncludesRolePerms");
     roleFS = (S3AFileSystem) path.getFileSystem(conf);
     assertTouchForbidden(roleFS, path);
@@ -447,8 +446,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     bindRolePolicy(conf,
         policy(
             statement(false, S3_ALL_BUCKETS, S3_PATH_WRITE_OPERATIONS),
-            STATEMENT_ALL_S3,
-            STATEMENT_ALLOW_SSE_KMS_READ));
+            STATEMENT_ALL_S3, STATEMENT_ALLOW_KMS_RW));
     Path path = methodPath();
     roleFS = (S3AFileSystem) path.getFileSystem(conf);
     // list the root path, expect happy
@@ -495,8 +493,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     Configuration conf = createAssumedRoleConfig();
 
     bindRolePolicyStatements(conf,
-        STATEMENT_ALL_BUCKET_READ_ACCESS,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+        STATEMENT_ALL_BUCKET_READ_ACCESS, STATEMENT_ALLOW_KMS_RW,
         new Statement(Effects.Allow)
           .addActions(S3_ALL_OPERATIONS)
           .addResources(directory(restrictedDir)));
@@ -563,8 +560,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     fs.delete(basePath, true);
     fs.mkdirs(readOnlyDir);
 
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,
         new Statement(Effects.Allow)
             .addActions(S3_PATH_RW_OPERATIONS)
@@ -714,8 +710,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     S3AFileSystem fs = getFileSystem();
     fs.delete(destDir, true);
 
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         new Statement(Effects.Deny)
             .addActions(S3_PATH_WRITE_OPERATIONS)
@@ -746,8 +741,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     describe("Restrict role to read only");
     Configuration conf = createAssumedRoleConfig();
 
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         statement(false, S3_ALL_BUCKETS, S3_GET_BUCKET_LOCATION));
     Path path = methodPath();

+ 1 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumedRoleCommitOperations.java

@@ -61,8 +61,7 @@ public class ITestAssumedRoleCommitOperations extends ITestCommitOperations {
     restrictedDir = super.path("restricted");
     Configuration conf = newAssumedRoleConfig(getConfiguration(),
         getAssumedRoleARN());
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_BUCKET_READ_OPERATIONS),
         new RoleModel.Statement(RoleModel.Effects.Allow)
             .addActions(S3_PATH_RW_OPERATIONS)

+ 1 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestRestrictedReadAccess.java

@@ -260,8 +260,7 @@ public class ITestRestrictedReadAccess extends AbstractS3ATestBase {
     // it still has write access, which can be explored in the final
     // step to delete files and directories.
     roleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(roleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(roleConfig, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         new Statement(Effects.Deny)
             .addActions(S3_ALL_GET)

+ 12 - 7
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestPartialRenamesDeletes.java

@@ -56,6 +56,7 @@ import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_DELETE_REQUEST;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.Effects;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.Statement;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.directory;
+import static org.apache.hadoop.fs.s3a.auth.RoleModel.resource;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.statement;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.*;
 import static org.apache.hadoop.fs.s3a.auth.RoleTestUtils.bindRolePolicyStatements;
@@ -144,6 +145,11 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
    */
   private Path writableDir;
 
+  /**
+   * Instruction file created when using CSE, required to be added to policies.
+   */
+  private Path writableDirInstructionFile;
+
   /**
    * A directory to which restricted roles have only read access.
    */
@@ -216,6 +222,7 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
     basePath = uniquePath();
     readOnlyDir = new Path(basePath, "readonlyDir");
     writableDir = new Path(basePath, "writableDir");
+    writableDirInstructionFile = new Path(basePath, "writableDir.instruction");
     readOnlyChild = new Path(readOnlyDir, "child");
     noReadDir = new Path(basePath, "noReadDir");
     // the full FS
@@ -225,8 +232,7 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
 
     // create the baseline assumed role
     assumedRoleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(assumedRoleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(assumedRoleConfig, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,  // root:     r-x
         new Statement(Effects.Allow)       // dest:     rwx
             .addActions(S3_PATH_RW_OPERATIONS)
@@ -365,13 +371,13 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
   public void testRenameParentPathNotWriteable() throws Throwable {
     describe("rename with parent paths not writeable; multi=%s", multiDelete);
     final Configuration conf = createAssumedRoleConfig();
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,
         new Statement(Effects.Allow)
             .addActions(S3_PATH_RW_OPERATIONS)
             .addResources(directory(readOnlyDir))
-            .addResources(directory(writableDir)));
+            .addResources(directory(writableDir))
+            .addResources(resource(writableDirInstructionFile, false, false)));
     roleFS = (S3AFileSystem) readOnlyDir.getFileSystem(conf);
 
     S3AFileSystem fs = getFileSystem();
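
The extra resource is needed because CSE can write encryption metadata to a companion "instruction file" object named <key>.instruction beside the data it encrypts; a policy granting only directory(writableDir) never matches that sibling key, so renames into the directory would be denied. An illustrative sketch of the layout (the suffix convention is the AWS encryption client's instruction-file mode):

    import org.apache.hadoop.fs.Path;

    // The instruction object is a sibling of the directory prefix:
    //   <base>/writableDir/...          covered by directory(writableDir)
    //   <base>/writableDir.instruction  needs its own grant, added above
    Path writableDir = new Path(basePath, "writableDir");
    Path writableDirInstructionFile = new Path(basePath, "writableDir.instruction");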
@@ -733,8 +739,7 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
     // s3:DeleteObjectVersion permission, and attempt rename
     // and then delete.
     Configuration roleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(roleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(roleConfig, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,  // root:     r-x
         new Statement(Effects.Allow)       // dest:     rwx
             .addActions(S3_PATH_RW_OPERATIONS)

+ 6 - 4
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardTool.java

@@ -70,6 +70,7 @@ public class ITestS3GuardTool extends AbstractS3GuardToolTestBase {
 
   @Test
   public void testLandsatBucketRequireUnencrypted() throws Throwable {
+    skipIfClientSideEncryption();
     run(BucketInfo.NAME,
         "-" + BucketInfo.ENCRYPTION_FLAG, "none",
         getLandsatCSVFile(getConfiguration()));
@@ -178,8 +179,9 @@ public class ITestS3GuardTool extends AbstractS3GuardToolTestBase {
       // least a second old
       describe("Sleeping 1 second then confirming upload still there");
       Thread.sleep(1000);
-      LambdaTestUtils.eventually(5000, 1000,
-          () -> { assertNumUploadsAge(path, 1, 1); });
+      LambdaTestUtils.eventually(5000, 1000, () -> {
+        assertNumUploadsAge(path, 1, 1);
+      });
 
       // 7. Assert deletion works when age filter matches
       describe("Doing aged deletion");
@@ -231,8 +233,8 @@ public class ITestS3GuardTool extends AbstractS3GuardToolTestBase {
    *                   search all parts
    * @throws Exception on failure
    */
-  private void uploadCommandAssertCount(S3AFileSystem fs, String options[],
-      Path path, int numUploads, int ageSeconds)
+  private void uploadCommandAssertCount(S3AFileSystem fs, String[] options, Path path,
+      int numUploads, int ageSeconds)
       throws Exception {
     List<String> allOptions = new ArrayList<>();
     List<String> output = new ArrayList<>();