
HADOOP-18820. Cut AWS v1 support (#5872)

This removes the AWS V1 SDK as a hadoop-aws runtime dependency.

It is still used at compile time so as to build a wrapper class,
V1ToV2AwsCredentialProviderAdapter, which allows v1 credential providers
to be used for authentication.
All well-known credential providers have their classnames remapped from
v1 to v2 classes prior to instantiation; this wrapper is not needed
for them.

There is no support for migrating other SDK plugin points
(signing, handlers).

Access to the v2 S3Client class used by an S3A FileSystem
instance is now via a new interface, org.apache.hadoop.fs.s3a.S3AInternals;
other low-level operations (getObjectMetadata(Path)) have moved there.
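
A minimal sketch of the new access pattern, assuming an initialized S3A
filesystem and a hypothetical bucket URI:

import java.net.URI;

import software.amazon.awssdk.services.s3.S3Client;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.S3AInternals;

public class S3AInternalsExample {
  public static void main(String[] args) throws Exception {
    // "example-bucket" is hypothetical; any S3A URI behaves the same.
    URI uri = URI.create("s3a://example-bucket/");
    S3AFileSystem fs = (S3AFileSystem) FileSystem.get(uri, new Configuration());

    // The v2 S3Client is reached through the new S3AInternals interface,
    // not via a method on the filesystem itself.
    S3AInternals internals = fs.getS3AInternals();
    S3Client s3 = internals.getAmazonS3V2ClientForTesting("diagnostics");
    System.out.println("service: " + s3.serviceName());
    System.out.println("bucket location: " + internals.getBucketLocation());
  }
}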

Contributed by Steve Loughran
Steve Loughran, 1 year ago
parent commit 2253c62f4a
70 files changed, with 2305 additions and 1223 deletions
  1. LICENSE-binary (+2 -1)
  2. hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (+11 -41)
  3. hadoop-project/pom.xml (+13 -1)
  4. hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml (+5 -0)
  5. hadoop-tools/hadoop-aws/pom.xml (+20 -2)
  6. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCredentialProviderList.java (+14 -51)
  7. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java (+8 -0)
  8. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/InconsistentS3ClientFactory.java (+0 -111)
  9. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/ProgressableProgressListener.java (+1 -0)
  10. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java (+127 -63)
  11. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInternals.java (+110 -0)
  12. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java (+23 -18)
  13. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/SimpleAWSCredentialsProvider.java (+7 -4)
  14. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Tristate.java (+44 -1)
  15. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/AwsV1BindingSupport.java (+118 -0)
  16. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1ToV2AwsCredentialProviderAdapter.java (+103 -12)
  17. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1V2AwsCredentialProviderAdapter.java (+0 -36)
  18. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/package-info.java (+4 -0)
  19. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/api/RequestFactory.java (+5 -1)
  20. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/ActiveAuditManagerS3A.java (+16 -7)
  21. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java (+14 -15)
  22. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AwsCredentialListProvider.java (+0 -283)
  23. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/CredentialProviderListFactory.java (+303 -0)
  24. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/IAMInstanceCredentialsProvider.java (+8 -1)
  25. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/SignerFactory.java (+1 -1)
  26. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/S3ADelegationTokens.java (+0 -5)
  27. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/SessionTokenBinding.java (+23 -8)
  28. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSClientConfig.java (+1 -1)
  29. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSHeaders.java (+9 -9)
  30. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/InstantiationIOException.java (+180 -0)
  31. hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/V2Migration.java (+16 -64)
  32. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/assumed_roles.md (+1 -1)
  33. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/auditing.md (+39 -9)
  34. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/aws_sdk_upgrade.md (+323 -18)
  35. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/delegation_tokens.md (+1 -1)
  36. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md (+46 -96)
  37. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/s3_select.md (+38 -5)
  38. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md (+4 -51)
  39. hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md (+40 -55)
  40. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java (+1 -1)
  41. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java (+8 -0)
  42. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java (+1 -1)
  43. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java (+97 -44)
  44. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABucketExistence.java (+8 -2)
  45. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACannedACLs.java (+1 -1)
  46. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java (+54 -46)
  47. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java (+1 -1)
  48. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java (+2 -1)
  49. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java (+1 -10)
  50. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java (+2 -2)
  51. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java (+2 -2)
  52. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java (+5 -0)
  53. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java (+137 -116)
  54. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java (+1 -1)
  55. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/adapter/TestV1CredentialsProvider.java (+222 -0)
  56. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/AbstractAuditingTest.java (+0 -1)
  57. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/ITestAuditManager.java (+8 -2)
  58. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/SimpleAWSExecutionInterceptor.java (+18 -1)
  59. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java (+9 -3)
  60. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestCustomSigner.java (+1 -1)
  61. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/RoleTestUtils.java (+5 -1)
  62. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/CountInvocationsProvider.java (+25 -2)
  63. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFilesystem.java (+1 -1)
  64. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationTokens.java (+5 -1)
  65. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java (+2 -2)
  66. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestDirectoryMarkerListing.java (+1 -1)
  67. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesStorageClass.java (+2 -2)
  68. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/StreamPublisher.java (+2 -2)
  69. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/TestSelectEventStreamPublisher.java (+3 -1)
  70. hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/statistics/ITestAWSStatisticCollection.java (+2 -2)

+ 2 - 1
LICENSE-binary

@@ -214,7 +214,6 @@ com.aliyun:aliyun-java-sdk-kms:2.11.0
 com.aliyun:aliyun-java-sdk-ram:3.1.0
 com.aliyun:aliyun-java-sdk-sts:3.0.0
 com.aliyun.oss:aliyun-sdk-oss:3.13.0
-com.amazonaws:aws-java-sdk-bundle:1.12.367
 com.cedarsoftware:java-util:1.9.0
 com.cedarsoftware:json-io:2.5.1
 com.fasterxml.jackson.core:jackson-annotations:2.12.7
@@ -368,6 +367,8 @@ org.objenesis:objenesis:2.6
 org.xerial.snappy:snappy-java:1.1.10.1
 org.yaml:snakeyaml:2.0
 org.wildfly.openssl:wildfly-openssl:1.1.3.Final
+software.amazon.awssdk:bundle:jar:2.19.12
+software.amazon.awssdk.crt:aws-crt:0.21.0
 
 
 --------------------------------------------------------------------------------

+ 11 - 41
hadoop-common-project/hadoop-common/src/main/resources/core-default.xml

@@ -1201,17 +1201,24 @@
   <description>AWS secret key used by S3A file system. Omit for IAM role-based or provider-based authentication.</description>
 </property>
 
+<property>
+  <name>fs.s3a.session.token</name>
+  <description>Session token, when using org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider
+    as one of the providers.
+  </description>
+</property>
+
 <property>
   <name>fs.s3a.aws.credentials.provider</name>
   <value>
     org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider,
     org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,
-    com.amazonaws.auth.EnvironmentVariableCredentialsProvider,
+    software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider,
     org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider
   </value>
   <description>
     Comma-separated class names of credential provider classes which implement
-    com.amazonaws.auth.AWSCredentialsProvider.
+    software.amazon.awssdk.auth.credentials.AwsCredentialsProvider.
 
     When S3A delegation tokens are not enabled, this list will be used
     to directly authenticate with S3 and other AWS services.
@@ -1219,43 +1226,6 @@
     token binding it may be used
     to communicate wih the STS endpoint to request session/role
     credentials.
-
-    These are loaded and queried in sequence for a valid set of credentials.
-    Each listed class must implement one of the following means of
-    construction, which are attempted in order:
-    * a public constructor accepting java.net.URI and
-        org.apache.hadoop.conf.Configuration,
-    * a public constructor accepting org.apache.hadoop.conf.Configuration,
-    * a public static method named getInstance that accepts no
-       arguments and returns an instance of
-       com.amazonaws.auth.AWSCredentialsProvider, or
-    * a public default constructor.
-
-    Specifying org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider allows
-    anonymous access to a publicly accessible S3 bucket without any credentials.
-    Please note that allowing anonymous access to an S3 bucket compromises
-    security and therefore is unsuitable for most use cases. It can be useful
-    for accessing public data sets without requiring AWS credentials.
-
-    If unspecified, then the default list of credential provider classes,
-    queried in sequence, is:
-    * org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider: looks
-       for session login secrets in the Hadoop configuration.
-    * org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider:
-       Uses the values of fs.s3a.access.key and fs.s3a.secret.key.
-    * com.amazonaws.auth.EnvironmentVariableCredentialsProvider: supports
-        configuration of AWS access key ID and secret access key in
-        environment variables named AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
-        and AWS_SESSION_TOKEN as documented in the AWS SDK.
-    * org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider: picks up
-       IAM credentials of any EC2 VM or AWS container in which the process is running.
-  </description>
-</property>
-
-<property>
-  <name>fs.s3a.session.token</name>
-  <description>Session token, when using org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider
-    as one of the providers.
   </description>
 </property>
 
@@ -1353,10 +1323,10 @@
     Note: for job submission to actually collect these tokens,
     Kerberos must be enabled.
 
-    Options are:
+    Bindings available in hadoop-aws are:
     org.apache.hadoop.fs.s3a.auth.delegation.SessionTokenBinding
     org.apache.hadoop.fs.s3a.auth.delegation.FullCredentialsTokenBinding
-    and org.apache.hadoop.fs.s3a.auth.delegation.RoleTokenBinding
+    org.apache.hadoop.fs.s3a.auth.delegation.RoleTokenBinding
   </description>
 </property>
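
For configurations set programmatically, the same switch looks like this
(a sketch; the property values mirror the new defaults above):

import org.apache.hadoop.conf.Configuration;

public class CredentialProviderConfig {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // v2 classnames are now the canonical form for the provider chain.
    conf.set("fs.s3a.aws.credentials.provider",
        "org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider,"
            + "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,"
            + "software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider,"
            + "org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider");
    // Configurations naming the old v1 class
    // com.amazonaws.auth.EnvironmentVariableCredentialsProvider keep working:
    // well-known v1 names are remapped to v2 equivalents before instantiation.
  }
}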
 

+ 13 - 1
hadoop-project/pom.xml

@@ -189,6 +189,7 @@
     <aws-java-sdk.version>1.12.367</aws-java-sdk.version>
     <hsqldb.version>2.7.1</hsqldb.version>
     <aws-java-sdk-v2.version>2.19.12</aws-java-sdk-v2.version>
+    <aws.evenstream.version>1.0.1</aws.evenstream.version>
     <awscrt.version>0.21.0</awscrt.version>
     <frontend-maven-plugin.version>1.11.2</frontend-maven-plugin.version>
     <jasmine-maven-plugin.version>2.1</jasmine-maven-plugin.version>
@@ -1111,6 +1112,12 @@
         <groupId>com.amazonaws</groupId>
         <artifactId>aws-java-sdk-core</artifactId>
         <version>${aws-java-sdk.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>*</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>software.amazon.awssdk</groupId>
@@ -1118,11 +1125,16 @@
         <version>${aws-java-sdk-v2.version}</version>
         <exclusions>
           <exclusion>
-            <groupId>io.netty</groupId>
+            <groupId>*</groupId>
             <artifactId>*</artifactId>
           </exclusion>
         </exclusions>
       </dependency>
+      <dependency>
+        <groupId>software.amazon.eventstream</groupId>
+        <artifactId>eventstream</artifactId>
+        <version>${aws.evenstream.version}</version>
+      </dependency>
       <dependency>
         <groupId>software.amazon.awssdk.crt</groupId>
         <artifactId>aws-crt</artifactId>

+ 5 - 0
hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml

@@ -64,6 +64,11 @@
     <Field name="futurePool"/>
     <Bug pattern="IS2_INCONSISTENT_SYNC"/>
   </Match>
+  <Match>
+    <Class name="org.apache.hadoop.fs.s3a.S3AFileSystem"/>
+    <Field name="s3AsyncClient"/>
+    <Bug pattern="IS2_INCONSISTENT_SYNC"/>
+  </Match>
   <Match>
     <Class name="org.apache.hadoop.fs.s3a.s3guard.S3GuardTool$BucketInfo"/>
     <Method name="run"/>

+ 20 - 2
hadoop-tools/hadoop-aws/pom.xml

@@ -463,6 +463,16 @@
                     <bannedImport>org.apache.hadoop.mapred.**</bannedImport>
                   </bannedImports>
                 </restrictImports>
+                <restrictImports>
+                  <includeTestCode>false</includeTestCode>
+                  <reason>Restrict AWS v1 imports to adapter code</reason>
+                  <exclusions>
+                    <exclusion>org.apache.hadoop.fs.s3a.adapter.V1ToV2AwsCredentialProviderAdapter</exclusion>
+                  </exclusions>
+                  <bannedImports>
+                    <bannedImport>com.amazonaws.**</bannedImport>
+                  </bannedImports>
+                </restrictImports>
               </rules>
             </configuration>
           </execution>
@@ -483,10 +493,14 @@
       <scope>test</scope>
       <type>test-jar</type>
     </dependency>
+
+    <!-- The v1 SDK is used at compilation time for adapter classes in
+         org.apache.hadoop.fs.s3a.adapter. It is not needed at runtime
+         unless a non-standard v1 credential provider is declared. -->
     <dependency>
       <groupId>com.amazonaws</groupId>
       <artifactId>aws-java-sdk-core</artifactId>
-      <scope>compile</scope>
+      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>software.amazon.awssdk</groupId>
@@ -496,7 +510,11 @@
     <dependency>
       <groupId>software.amazon.awssdk.crt</groupId>
       <artifactId>aws-crt</artifactId>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.eventstream</groupId>
+      <artifactId>eventstream</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.assertj</groupId>
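
Because the v1 SDK drops to "provided" scope, it vanishes from the runtime
classpath unless a deployment adds the jar back. A small probe (a sketch using
the new support class introduced further down) shows how availability can be
checked:

import org.apache.hadoop.fs.s3a.adapter.AwsV1BindingSupport;

public class V1SdkProbe {
  public static void main(String[] args) {
    // With aws-java-sdk-core now "provided", it is absent at runtime unless a
    // deployment adds the jar back; this reports which situation applies.
    System.out.println("AWS v1 SDK available: "
        + AwsV1BindingSupport.isAwsV1SdkAvailable());
  }
}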

+ 14 - 51
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCredentialProviderList.java

@@ -27,27 +27,21 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
 
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.AWSCredentialsProvider;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.auth.BasicSessionCredentials;
-import org.apache.hadoop.fs.s3a.adapter.V1V2AwsCredentialProviderAdapter;
-import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException;
 import org.apache.hadoop.fs.s3a.auth.NoAwsCredentialsException;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.Preconditions;
 
 import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
 import software.amazon.awssdk.auth.credentials.AwsCredentials;
 import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
-import software.amazon.awssdk.auth.credentials.AwsSessionCredentials;
 import software.amazon.awssdk.core.exception.SdkException;
 
 /**
@@ -105,23 +99,8 @@ public final class AWSCredentialProviderList implements AwsCredentialsProvider,
    * @param providers provider list.
    */
   public AWSCredentialProviderList(
-      Collection<AWSCredentialsProvider> providers) {
-    for (AWSCredentialsProvider provider: providers) {
-      this.providers.add(V1V2AwsCredentialProviderAdapter.adapt(provider));
-    }
-  }
-
-  /**
-   * Create with an initial list of providers.
-   * @param name name for error messages, may be ""
-   * @param providerArgs provider list.
-   */
-  public AWSCredentialProviderList(final String name,
-      final AWSCredentialsProvider... providerArgs) {
-    setName(name);
-    for (AWSCredentialsProvider provider: providerArgs) {
-      this.providers.add(V1V2AwsCredentialProviderAdapter.adapt(provider));
-    }
+      Collection<AwsCredentialsProvider> providers) {
+    this.providers.addAll(providers);
   }
 
   /**
@@ -147,14 +126,6 @@ public final class AWSCredentialProviderList implements AwsCredentialsProvider,
     }
   }
 
-  /**
-   * Add a new provider.
-   * @param provider provider
-   */
-  public void add(AWSCredentialsProvider provider) {
-    providers.add(V1V2AwsCredentialProviderAdapter.adapt(provider));
-  }
-
   /**
    * Add a new SDK V2 provider.
    * @param provider provider
@@ -163,7 +134,6 @@ public final class AWSCredentialProviderList implements AwsCredentialsProvider,
     providers.add(provider);
   }
 
-
   /**
    * Add all providers from another list to this one.
    * @param other the other list.
@@ -173,19 +143,11 @@ public final class AWSCredentialProviderList implements AwsCredentialsProvider,
   }
 
   /**
-   * This method will get credentials using SDK V2's resolveCredentials and then convert it into
-   * V1 credentials. This required by delegation token binding classes.
-   * @return SDK V1 credentials
+   * Was an implementation of the v1 refresh; now just
+   * a no-op.
    */
-  public AWSCredentials getCredentials() {
-    AwsCredentials credentials = resolveCredentials();
-    if (credentials instanceof AwsSessionCredentials) {
-      return new BasicSessionCredentials(credentials.accessKeyId(),
-          credentials.secretAccessKey(),
-          ((AwsSessionCredentials) credentials).sessionToken());
-    } else {
-      return new BasicAWSCredentials(credentials.accessKeyId(), credentials.secretAccessKey());
-    }
+  @Deprecated
+  public void refresh() {
   }
 
   /**
@@ -256,8 +218,7 @@ public final class AWSCredentialProviderList implements AwsCredentialsProvider,
    *
    * @return providers
    */
-  @VisibleForTesting
-  List<AwsCredentialsProvider> getProviders() {
+  public List<AwsCredentialsProvider> getProviders() {
     return providers;
   }
 
@@ -289,9 +250,11 @@ public final class AWSCredentialProviderList implements AwsCredentialsProvider,
    */
   @Override
   public String toString() {
-    return "AWSCredentialProviderList[" +
-        name +
-        "refcount= " + refCount.get() + ": [" +
+    return "AWSCredentialProviderList"
+        + " name=" + name
+        + "; refcount= " + refCount.get()
+        + "; size="+ providers.size()
+        + ": [" +
         StringUtils.join(providers, ", ") + ']'
         + (lastProvider != null ? (" last provider: " + lastProvider) : "");
   }
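
With the v1-accepting constructors gone, callers build the list purely from
v2 providers; a sketch:

import java.util.Arrays;

import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider;

import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;

public class ProviderListExample {
  public static void main(String[] args) {
    // The surviving collection constructor accepts v2 providers only;
    // v1 providers must first be wrapped by the adapter package.
    AWSCredentialProviderList list = new AWSCredentialProviderList(
        Arrays.<AwsCredentialsProvider>asList(
            EnvironmentVariableCredentialsProvider.create(),
            AnonymousCredentialsProvider.create()));
    System.out.println(list);  // improved toString(): name, refcount and size
  }
}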

+ 8 - 0
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java

@@ -727,11 +727,19 @@ public final class Constants {
   public static final String STREAM_READ_GAUGE_INPUT_POLICY =
       "stream_read_gauge_input_policy";
 
+  /**
+   * S3 Client Factory implementation class: {@value}.
+   * Unstable and incompatible between v1 and v2 SDK versions.
+   */
   @InterfaceAudience.Private
   @InterfaceStability.Unstable
   public static final String S3_CLIENT_FACTORY_IMPL =
       "fs.s3a.s3.client.factory.impl";
 
+  /**
+   * Default factory:
+   * {@code org.apache.hadoop.fs.s3a.DefaultS3ClientFactory}.
+   */
   @InterfaceAudience.Private
   @InterfaceStability.Unstable
   public static final Class<? extends S3ClientFactory>
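
A sketch of overriding the factory, where com.example.MyS3ClientFactory stands
in for a custom implementation (hypothetical name):

import org.apache.hadoop.conf.Configuration;

public class ClientFactoryConfig {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Private/unstable option: a factory named here must now be written
    // against the v2 SDK; v1-era factories no longer fit the interface.
    // "com.example.MyS3ClientFactory" is a hypothetical implementation.
    conf.set("fs.s3a.s3.client.factory.impl", "com.example.MyS3ClientFactory");
  }
}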

+ 0 - 111
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/InconsistentS3ClientFactory.java

@@ -1,111 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3a;
-
-import java.io.IOException;
-import java.util.concurrent.atomic.AtomicLong;
-
-import software.amazon.awssdk.awscore.exception.AwsServiceException;
-import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
-import software.amazon.awssdk.core.exception.SdkException;
-import software.amazon.awssdk.core.interceptor.Context;
-import software.amazon.awssdk.core.interceptor.ExecutionAttributes;
-import software.amazon.awssdk.core.interceptor.ExecutionInterceptor;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * S3 Client factory used for testing with eventual consistency fault injection.
- * This client is for testing <i>only</i>; it is in the production
- * {@code hadoop-aws} module to enable integration tests to use this
- * just by editing the Hadoop configuration used to bring up the client.
- *
- * The factory injects an {@link ExecutionInterceptor} to inject failures.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class InconsistentS3ClientFactory extends DefaultS3ClientFactory {
-
-  @Override
-  protected ClientOverrideConfiguration createClientOverrideConfiguration(
-      S3ClientCreationParameters parameters, Configuration conf) throws IOException {
-    LOG.warn("** FAILURE INJECTION ENABLED.  Do not run in production! **");
-    LOG.warn("List inconsistency is no longer emulated; only throttling and read errors");
-    return super.createClientOverrideConfiguration(parameters, conf)
-        .toBuilder()
-        .addExecutionInterceptor(new FailureInjectionInterceptor(
-            new FailureInjectionPolicy(conf)))
-        .build();
-  }
-
-  private static class FailureInjectionInterceptor implements ExecutionInterceptor {
-
-    private final FailureInjectionPolicy policy;
-
-    /**
-     * Counter of failures since last reset.
-     */
-    private final AtomicLong failureCounter = new AtomicLong(0);
-
-   FailureInjectionInterceptor(FailureInjectionPolicy policy) {
-      this.policy = policy;
-    }
-
-    @Override
-    public void beforeExecution(Context.BeforeExecution context,
-        ExecutionAttributes executionAttributes) {
-      maybeFail();
-    }
-
-    private void maybeFail() {
-      maybeFail("throttled", 503);
-    }
-
-    /**
-     * Conditionally fail the operation.
-     * @param errorMsg description of failure
-     * @param statusCode http status code for error
-     * @throws SdkException if the client chooses to fail
-     * the request.
-     */
-    private void maybeFail(String errorMsg, int statusCode)
-        throws SdkException {
-      // code structure here is to line up for more failures later
-      AwsServiceException ex = null;
-      if (FailureInjectionPolicy.trueWithProbability(policy.getThrottleProbability())) {
-        // throttle the request
-        ex = AwsServiceException.builder()
-            .message(errorMsg + " count = " + (failureCounter.get() + 1))
-            .statusCode(statusCode)
-            .build();
-      }
-
-      int failureLimit = policy.getFailureLimit();
-      if (ex != null) {
-        long count = failureCounter.incrementAndGet();
-        if (failureLimit == 0
-            || (failureLimit > 0 && count < failureLimit)) {
-          throw ex;
-        }
-      }
-    }
-  }
-}

+ 1 - 0
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/ProgressableProgressListener.java

@@ -75,6 +75,7 @@ public class ProgressableProgressListener implements TransferListener {
   /**
    * Method to invoke after upload has completed.
    * This can handle race conditions in setup/teardown.
+   * @param upload upload which has just completed.
    * @return the number of bytes which were transferred after the notification
    */
   public long uploadCompleted(ObjectTransfer upload) {

+ 127 - 63
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java

@@ -147,7 +147,6 @@ import org.apache.hadoop.fs.s3a.impl.S3AMultipartUploaderBuilder;
 import org.apache.hadoop.fs.s3a.impl.StatusProbeEnum;
 import org.apache.hadoop.fs.s3a.impl.StoreContext;
 import org.apache.hadoop.fs.s3a.impl.StoreContextBuilder;
-import org.apache.hadoop.fs.s3a.impl.V2Migration;
 import org.apache.hadoop.fs.s3a.prefetch.S3APrefetchingInputStream;
 import org.apache.hadoop.fs.s3a.tools.MarkerToolOperations;
 import org.apache.hadoop.fs.s3a.tools.MarkerToolOperationsImpl;
@@ -230,7 +229,7 @@ import static org.apache.hadoop.fs.s3a.Listing.toLocatedFileStatusIterator;
 import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.Statistic.*;
 import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.INITIALIZE_SPAN;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.createAWSCredentialProviderSet;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.createAWSCredentialProviderList;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.STATEMENT_ALLOW_SSE_KMS_RW;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.allowS3Operations;
 import static org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens.TokenIssuingPolicy.NoTokensAvailable;
@@ -294,6 +293,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
   private Path workingDir;
   private String username;
   private S3Client s3Client;
+  /** Async client is used for transfer manager and s3 select. */
   private S3AsyncClient s3AsyncClient;
   // initial callback policy is fail-once; it's there just to assist
   // some mock tests and other codepaths trying to call the low level
@@ -385,6 +385,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
 
   private AWSCredentialProviderList credentials;
   private SignerManager signerManager;
+  private S3AInternals s3aInternals;
 
   /**
    * Page size for deletions.
@@ -533,6 +534,8 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
       super.initialize(uri, conf);
       setConf(conf);
 
+      s3aInternals = createS3AInternals();
+
       // look for encryption data
       // DT Bindings may override this
       setEncryptionSecrets(
@@ -854,7 +857,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
 
     if(!trackDurationAndSpan(
         STORE_EXISTS_PROBE, bucket, null, () ->
-            invoker.retry("doestBucketExist", bucket, true, () -> {
+            invoker.retry("doesBucketExist", bucket, true, () -> {
               try {
                 if (BUCKET_REGIONS.containsKey(bucket)) {
                   return true;
@@ -927,7 +930,6 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
       // with it if so.
 
       LOG.debug("Using delegation tokens");
-      V2Migration.v1DelegationTokenCredentialProvidersUsed();
       S3ADelegationTokens tokens = new S3ADelegationTokens();
       this.delegationTokens = Optional.of(tokens);
       tokens.bindToFileSystem(getCanonicalUri(),
@@ -954,7 +956,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
       uaSuffix = tokens.getUserAgentField();
     } else {
       // DT support is disabled, so create the normal credential chain
-      credentials = createAWSCredentialProviderSet(name, conf);
+      credentials = createAWSCredentialProviderList(name, conf);
     }
     LOG.debug("Using credential provider {}", credentials);
     Class<? extends S3ClientFactory> s3ClientFactoryClass = conf.getClass(
@@ -988,7 +990,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
     S3ClientFactory clientFactory = ReflectionUtils.newInstance(s3ClientFactoryClass, conf);
     s3Client = clientFactory.createS3Client(getUri(), parameters);
     createS3AsyncClient(clientFactory, parameters);
-    transferManager =  clientFactory.createS3TransferManager(s3AsyncClient);
+    transferManager =  clientFactory.createS3TransferManager(getS3AsyncClient());
   }
 
   /**
@@ -999,7 +1001,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
    * @param parameters parameter object
    * @throws IOException on any IO problem
    */
-  private synchronized void createS3AsyncClient(S3ClientFactory clientFactory,
+  private void createS3AsyncClient(S3ClientFactory clientFactory,
       S3ClientFactory.S3ClientCreationParameters parameters) throws IOException {
     s3AsyncClient = clientFactory.createS3AsyncClient(getUri(), parameters);
   }
@@ -1202,6 +1204,14 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
     return requestFactory;
   }
 
+  /**
+   * Get the S3 Async client; synchronized to keep spotbugs quiet.
+   * @return the async s3 client.
+   */
+  private S3AsyncClient getS3AsyncClient() {
+    return s3AsyncClient;
+  }
+
   /**
    * Implementation of all operations used by delegation tokens.
    */
@@ -1341,19 +1351,6 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
     return 0;
   }
 
-  /**
-   * Returns the S3 client used by this filesystem.
-   * <i>Warning: this must only be used for testing, as it bypasses core
-   * S3A operations. </i>
-   * @param reason a justification for requesting access.
-   * @return S3Client
-   */
-  @VisibleForTesting
-  public S3Client getAmazonS3ClientForTesting(String reason) {
-    LOG.warn("Access to S3 client requested, reason {}", reason);
-    return s3Client;
-  }
-
   /**
    * Set the client -used in mocking tests to force in a different client.
    * @param client client.
@@ -1365,45 +1362,112 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
   }
 
   /**
-   * Get the region of a bucket.
-   * Invoked from StoreContext; consider an entry point.
-   * @return the region in which a bucket is located
-   * @throws AccessDeniedException if the caller lacks permission.
-   * @throws IOException on any failure.
+   * S3AInternals method.
+   * {@inheritDoc}.
    */
+  @AuditEntryPoint
   @Retries.RetryTranslated
-  @InterfaceAudience.LimitedPrivate("diagnostics")
   public String getBucketLocation() throws IOException {
-    return getBucketLocation(bucket);
+    return s3aInternals.getBucketLocation(bucket);
   }
 
   /**
-   * Get the region of a bucket; fixing up the region so it can be used
-   * in the builders of other AWS clients.
-   * TODO: Review. Used only for S3Guard?
-   * Requires the caller to have the AWS role permission
-   * {@code s3:GetBucketLocation}.
-   * Retry policy: retrying, translated.
-   * @param bucketName the name of the bucket
-   * @return the region in which a bucket is located
-   * @throws AccessDeniedException if the caller lacks permission.
-   * @throws IOException on any failure.
+   * Create the S3AInternals; left as something mocking
+   * subclasses may want to override.
+   * @return the internal implementation
    */
-  @VisibleForTesting
-  @AuditEntryPoint
-  @Retries.RetryTranslated
-  public String getBucketLocation(String bucketName) throws IOException {
-    final String region = trackDurationAndSpan(
-        STORE_EXISTS_PROBE, bucketName, null, () ->
-            invoker.retry("getBucketLocation()", bucketName, true, () ->
-                // If accessPoint then region is known from Arn
-                accessPoint != null
-                    ? accessPoint.getRegion()
-                    : s3Client.getBucketLocation(GetBucketLocationRequest.builder()
-                        .bucket(bucketName)
-                        .build())
-                    .locationConstraintAsString()));
-    return fixBucketRegion(region);
+  protected S3AInternals createS3AInternals() {
+    return new S3AInternalsImpl();
+  }
+
+  /**
+   * Get the S3AInternals.
+   * @return the internal implementation
+   */
+  public S3AInternals getS3AInternals() {
+    return s3aInternals;
+  }
+
+  /**
+   * Implementation of the S3A Internals operations; pulled out of S3AFileSystem to
+   * force code accessing it to call {@link #getS3AInternals()}.
+   */
+  private final class S3AInternalsImpl implements S3AInternals {
+
+    @Override
+    public S3Client getAmazonS3V2ClientForTesting(String reason) {
+      LOG.debug("Access to S3 client requested, reason {}", reason);
+      return s3Client;
+    }
+
+    /**
+     * S3AInternals method.
+     * {@inheritDoc}.
+     */
+    @Override
+    @AuditEntryPoint
+    @Retries.RetryTranslated
+    public String getBucketLocation() throws IOException {
+      return s3aInternals.getBucketLocation(bucket);
+    }
+
+    /**
+     * S3AInternals method.
+     * {@inheritDoc}.
+     */
+    @Override
+    @AuditEntryPoint
+    @Retries.RetryTranslated
+    public String getBucketLocation(String bucketName) throws IOException {
+      final String region = trackDurationAndSpan(
+          STORE_EXISTS_PROBE, bucketName, null, () ->
+              invoker.retry("getBucketLocation()", bucketName, true, () ->
+                  // If accessPoint then region is known from Arn
+                  accessPoint != null
+                      ? accessPoint.getRegion()
+                      : s3Client.getBucketLocation(GetBucketLocationRequest.builder()
+                          .bucket(bucketName)
+                          .build())
+                      .locationConstraintAsString()));
+      return fixBucketRegion(region);
+    }
+
+    /**
+     * S3AInternals method.
+     * {@inheritDoc}.
+     */
+    @Override
+    @AuditEntryPoint
+    @Retries.RetryTranslated
+    public HeadObjectResponse getObjectMetadata(Path path) throws IOException {
+      return trackDurationAndSpan(INVOCATION_GET_FILE_STATUS, path, () ->
+          S3AFileSystem.this.getObjectMetadata(makeQualified(path), null, invoker,
+              "getObjectMetadata"));
+    }
+
+    /**
+     * S3AInternals method.
+     * {@inheritDoc}.
+     */
+    @Override
+    @AuditEntryPoint
+    @Retries.RetryTranslated
+    public HeadBucketResponse getBucketMetadata() throws IOException {
+      return S3AFileSystem.this.getBucketMetadata();
+    }
+
+    /**
+     * Get a shared copy of the AWS credentials, with its reference
+     * counter updated.
+     * Caller is required to call {@code close()} on this after
+     * they have finished using it.
+     * @param purpose what is this for? This is initially for logging
+     * @return a reference to shared credentials.
+     */
+    public AWSCredentialProviderList shareCredentials(final String purpose) {
+      LOG.debug("Sharing credentials for: {}", purpose);
+      return credentials.share();
+    }
   }
 
   /**
@@ -1426,7 +1490,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
   }
 
   /**
-   * Get the encryption algorithm of this endpoint.
+   * Get the encryption algorithm of this connector.
    * @return the encryption algorithm.
    */
   public S3AEncryptionMethods getS3EncryptionAlgorithm() {
@@ -1473,6 +1537,8 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
    * Get the bucket of this filesystem.
    * @return the bucket
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Stable
   public String getBucket() {
     return bucket;
   }
@@ -1761,7 +1827,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
     public CompletableFuture<Void> selectObjectContent(
         SelectObjectContentRequest request,
         SelectObjectContentResponseHandler responseHandler) {
-      return s3AsyncClient.selectObjectContent(request, responseHandler);
+      return getS3AsyncClient().selectObjectContent(request, responseHandler);
     }
 
     @Override
@@ -2466,21 +2532,17 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
    * Low-level call to get at the object metadata.
    * This method is used in some external applications and so
    * must be viewed as a public entry point.
-   * Auditing: An audit entry point.
+   * @deprecated use S3AInternals API.
    * @param path path to the object. This will be qualified.
    * @return metadata
    * @throws IOException IO and object access problems.
    */
-  @VisibleForTesting
   @AuditEntryPoint
   @InterfaceAudience.LimitedPrivate("utilities")
   @Retries.RetryTranslated
-  @InterfaceStability.Evolving
+  @Deprecated
   public HeadObjectResponse getObjectMetadata(Path path) throws IOException {
-    V2Migration.v1GetObjectMetadataCalled();
-    return trackDurationAndSpan(INVOCATION_GET_FILE_STATUS, path, () ->
-        getObjectMetadata(makeQualified(path), null, invoker,
-            "getObjectMetadata"));
+    return getS3AInternals().getObjectMetadata(path);
   }
 
   /**
@@ -2767,7 +2829,8 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
    * @throws UnknownStoreException the bucket is absent
    * @throws IOException  any other problem talking to S3
    */
-  @Retries.RetryRaw
+  @AuditEntryPoint
+  @Retries.RetryTranslated
   protected HeadBucketResponse getBucketMetadata() throws IOException {
     final HeadBucketResponse response = trackDurationAndSpan(STORE_EXISTS_PROBE, bucket, null,
         () -> invoker.retry("getBucketMetadata()", bucket, true, () -> {
@@ -4175,7 +4238,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
   protected synchronized void stopAllServices() {
     closeAutocloseables(LOG, transferManager,
         s3Client,
-        s3AsyncClient);
+        getS3AsyncClient());
     transferManager = null;
     s3Client = null;
     s3AsyncClient = null;
@@ -5563,4 +5626,5 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
   public boolean isMultipartUploadEnabled() {
     return isMultipartUploadEnabled;
   }
+
 }
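
For callers of the deprecated entry point, the migration is mechanical; a
sketch with a hypothetical path:

import java.io.IOException;

import software.amazon.awssdk.services.s3.model.HeadObjectResponse;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.S3AFileSystem;

public class GetObjectMetadataMigration {
  static HeadObjectResponse head(S3AFileSystem fs) throws IOException {
    Path path = new Path("/data/part-0000.csv");  // hypothetical object
    // Before: fs.getObjectMetadata(path) -- still present, now @Deprecated.
    // After: route through the S3AInternals interface.
    return fs.getS3AInternals().getObjectMetadata(path);
  }
}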

+ 110 - 0
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInternals.java

@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import java.io.IOException;
+import java.nio.file.AccessDeniedException;
+
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.HeadBucketResponse;
+import software.amazon.awssdk.services.s3.model.HeadObjectResponse;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.store.audit.AuditEntryPoint;
+
+/**
+ * This is an unstable interface for access to S3A Internal state, S3 operations
+ * and the S3 client connector itself.
+ */
+@InterfaceStability.Unstable
+@InterfaceAudience.LimitedPrivate("testing/diagnostics")
+public interface S3AInternals {
+
+  /**
+   * Returns the S3 client used by this filesystem.
+   * Will log once first, to discourage use.
+   * <i>Warning: this must only be used for testing, as it bypasses core
+   * S3A operations. </i>
+   * Mocking note: this is the same s3client as is used by the owning
+   * filesystem; changes to this client will be reflected by changes
+   * in the behavior of that filesystem.
+   * @param reason a justification for requesting access.
+   * @return S3Client
+   */
+  S3Client getAmazonS3V2ClientForTesting(String reason);
+
+  /**
+   * Get the region of a bucket.
+   * Invoked from StoreContext; consider an entry point.
+   * @return the region in which a bucket is located
+   * @throws AccessDeniedException if the caller lacks permission.
+   * @throws IOException on any failure.
+   */
+  @Retries.RetryTranslated
+  @AuditEntryPoint
+  String getBucketLocation() throws IOException;
+
+  /**
+   * Get the region of a bucket; fixing up the region so it can be used
+   * in the builders of other AWS clients.
+   * Requires the caller to have the AWS role permission
+   * {@code s3:GetBucketLocation}.
+   * Retry policy: retrying, translated.
+   * @param bucketName the name of the bucket
+   * @return the region in which a bucket is located
+   * @throws AccessDeniedException if the caller lacks permission.
+   * @throws IOException on any failure.
+   */
+  @AuditEntryPoint
+  @Retries.RetryTranslated
+  String getBucketLocation(String bucketName) throws IOException;
+
+  /**
+   * Low-level call to get at the object metadata.
+   * Auditing: An audit entry point.
+   * @param path path to the object. This will be qualified.
+   * @return metadata
+   * @throws IOException IO and object access problems.
+   */
+  @AuditEntryPoint
+  @Retries.RetryTranslated
+  HeadObjectResponse getObjectMetadata(Path path) throws IOException;
+
+  /**
+   * Get a shared copy of the AWS credentials, with its reference
+   * counter updated.
+   * Caller is required to call {@code close()} on this after
+   * they have finished using it.
+   * @param purpose what is this for? This is for logging
+   * @return a reference to shared credentials.
+   */
+  AWSCredentialProviderList shareCredentials(String purpose);
+
+  /**
+   * Request bucket metadata.
+   * @return the metadata
+   * @throws UnknownStoreException the bucket is absent
+   * @throws IOException  any other problem talking to S3
+   */
+  @AuditEntryPoint
+  @Retries.RetryTranslated
+  HeadBucketResponse getBucketMetadata() throws IOException;
+}
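
A sketch of borrowing the filesystem's credential chain through this
interface, assuming AWSCredentialProviderList remains auto-closeable, as its
close()/reference-count contract above implies:

import software.amazon.awssdk.auth.credentials.AwsCredentials;

import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;
import org.apache.hadoop.fs.s3a.S3AFileSystem;

public class ShareCredentialsExample {
  static void printKeyId(S3AFileSystem fs) throws Exception {
    // shareCredentials() bumps the reference count; the caller must close
    // the returned reference when done.
    try (AWSCredentialProviderList creds =
             fs.getS3AInternals().shareCredentials("example")) {
      AwsCredentials resolved = creds.resolveCredentials();
      System.out.println("access key id: " + resolved.accessKeyId());
    }
  }
}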

+ 23 - 18
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java

@@ -76,6 +76,10 @@ import java.util.concurrent.ExecutionException;
 import static org.apache.commons.lang3.StringUtils.isEmpty;
 import static org.apache.hadoop.fs.s3a.Constants.*;
 import static org.apache.hadoop.fs.s3a.impl.ErrorTranslation.isUnknownBucket;
+import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.instantiationException;
+import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.isAbstract;
+import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.isNotInstanceOf;
+import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.unsupportedConstructor;
 import static org.apache.hadoop.fs.s3a.impl.InternalConstants.*;
 import static org.apache.hadoop.io.IOUtils.cleanupWithLogger;
 import static org.apache.hadoop.util.functional.RemoteIterators.filteringRemoteIterator;
@@ -88,9 +92,6 @@ import static org.apache.hadoop.util.functional.RemoteIterators.filteringRemoteI
 public final class S3AUtils {
 
   private static final Logger LOG = LoggerFactory.getLogger(S3AUtils.class);
-  static final String CONSTRUCTOR_EXCEPTION = "constructor exception";
-  static final String INSTANTIATION_EXCEPTION
-      = "instantiation exception";
 
   static final String ENDPOINT_KEY = "Endpoint";
 
@@ -535,7 +536,7 @@ public final class S3AUtils {
     return date.getTime();
   }
 
-  /***
+  /**
    * Creates an instance of a class using reflection. The
    * class must implement one of the following means of construction, which are
    * attempted in order:
@@ -551,7 +552,7 @@ public final class S3AUtils {
    * <li>a public default constructor.</li>
    * </ol>
    *
-   * @param instanceClass Class for which instance is to be created
+   * @param className name of class for which instance is to be created
    * @param conf configuration
    * @param uri URI of the FS
    * @param interfaceImplemented interface that this class implements
@@ -562,14 +563,22 @@ public final class S3AUtils {
    * @throws IOException on any problem
    */
   @SuppressWarnings("unchecked")
-  public static <InstanceT> InstanceT getInstanceFromReflection(Class<?> instanceClass,
-      Configuration conf, @Nullable URI uri, Class<?> interfaceImplemented, String methodName,
+  public static <InstanceT> InstanceT getInstanceFromReflection(String className,
+      Configuration conf,
+      @Nullable URI uri,
+      Class<? extends InstanceT> interfaceImplemented,
+      String methodName,
       String configKey) throws IOException {
-
-    String className = instanceClass.getName();
-
     try {
-      Constructor cons = null;
+      Class<?> instanceClass = S3AUtils.class.getClassLoader().loadClass(className);
+      if (Modifier.isAbstract(instanceClass.getModifiers())) {
+        throw isAbstract(uri, className, configKey);
+      }
+      if (!interfaceImplemented.isAssignableFrom(instanceClass)) {
+        throw isNotInstanceOf(uri, className, interfaceImplemented.getName(), configKey);
+
+      }
+      Constructor cons;
       if (conf != null) {
         // new X(uri, conf)
         cons = getConstructor(instanceClass, URI.class, Configuration.class);
@@ -597,10 +606,7 @@ public final class S3AUtils {
       }
 
       // no supported constructor or factory method found
-      throw new IOException(String.format("%s " + CONSTRUCTOR_EXCEPTION
-          + ".  A class specified in %s must provide a public constructor "
-          + "of a supported signature, or a public factory method named "
-          + "create that accepts no arguments.", className, configKey));
+      throw unsupportedConstructor(uri, className, configKey);
     } catch (InvocationTargetException e) {
       Throwable targetException = e.getTargetException();
       if (targetException == null) {
@@ -612,12 +618,11 @@ public final class S3AUtils {
         throw translateException("Instantiate " + className, "", (SdkException) targetException);
       } else {
         // supported constructor or factory method found, but the call failed
-        throw new IOException(className + " " + INSTANTIATION_EXCEPTION + ": " + targetException,
-            targetException);
+        throw instantiationException(uri, className, configKey, targetException);
       }
     } catch (ReflectiveOperationException | IllegalArgumentException e) {
       // supported constructor or factory method found, but the call failed
-      throw new IOException(className + " " + INSTANTIATION_EXCEPTION + ": " + e, e);
+      throw instantiationException(uri, className, configKey, e);
     }
   }
 

+ 7 - 4
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/SimpleAWSCredentialsProvider.java

@@ -60,7 +60,7 @@ public class SimpleAWSCredentialsProvider implements AwsCredentialsProvider {
    */
   public SimpleAWSCredentialsProvider(final URI uri, final Configuration conf)
       throws IOException {
-      this(getAWSAccessKeys(uri, conf));
+    this(getAWSAccessKeys(uri, conf));
   }
 
   /**
@@ -72,8 +72,8 @@ public class SimpleAWSCredentialsProvider implements AwsCredentialsProvider {
   @VisibleForTesting
   SimpleAWSCredentialsProvider(final S3xLoginHelper.Login login)
       throws IOException {
-      this.accessKey = login.getUser();
-      this.secretKey = login.getPassword();
+    this.accessKey = login.getUser();
+    this.secretKey = login.getPassword();
   }
 
   @Override
@@ -87,7 +87,10 @@ public class SimpleAWSCredentialsProvider implements AwsCredentialsProvider {
 
   @Override
   public String toString() {
-    return getClass().getSimpleName();
+    return "SimpleAWSCredentialsProvider{" +
+        "accessKey.empty=" + accessKey.isEmpty() +
+        ", secretKey.empty'" + secretKey.isEmpty() +
+        '}';
   }
 
 }

+ 44 - 1
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Tristate.java

@@ -18,15 +18,58 @@
 
 package org.apache.hadoop.fs.s3a;
 
+import java.util.Optional;
+
+import static java.util.Optional.empty;
+import static java.util.Optional.of;
+
 /**
  * Simple enum to express {true, false, don't know}.
  */
 public enum Tristate {
+
   // Do not add additional values here.  Logic will assume there are exactly
   // three possibilities.
-  TRUE, FALSE, UNKNOWN;
+  TRUE(of(Boolean.TRUE)),
+  FALSE(of(Boolean.FALSE)),
+  UNKNOWN(empty());
+
+  /**
+   *  Mapping to an optional boolean.
+   */
+  @SuppressWarnings("NonSerializableFieldInSerializableClass")
+  private final Optional<Boolean> mapping;
+
+  Tristate(final Optional<Boolean> t) {
+    mapping = t;
+  }
+
+  /**
+   * Get the boolean mapping, if present.
+   * @return the boolean value, if present.
+   */
+  public Optional<Boolean> getMapping() {
+    return mapping;
+  }
+
+  /**
+   * Does this value map to a boolean.
+   * @return true if the state is one of true or false.
+   */
+  public boolean isBoolean() {
+    return mapping.isPresent();
+  }
 
   public static Tristate fromBool(boolean v) {
     return v ? TRUE : FALSE;
   }
+
+  /**
+   * Build a tristate from a boolean.
+   * @param b source optional
+   * @return a tristate derived from the argument.
+   */
+  public static Tristate from(Optional<Boolean> b) {
+    return b.map(Tristate::fromBool).orElse(UNKNOWN);
+  }
 }
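
The new Optional mapping round-trips cleanly; a short sketch:

import java.util.Optional;

import org.apache.hadoop.fs.s3a.Tristate;

public class TristateExample {
  public static void main(String[] args) {
    Tristate t = Tristate.from(Optional.of(true));  // TRUE
    Tristate u = Tristate.from(Optional.empty());   // UNKNOWN
    System.out.println(t.getMapping());  // Optional[true]
    System.out.println(u.isBoolean());   // false
  }
}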

+ 118 - 0
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/AwsV1BindingSupport.java

@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.adapter;
+
+import java.io.IOException;
+import java.net.URI;
+import javax.annotation.Nullable;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
+
+import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.unavailable;
+
+/**
+ * Binding support; the sole way which the rest of the code should instantiate v1 SDK libraries.
+ * Uses this class's Classloader for its analysis/loading.
+ */
+@SuppressWarnings("StaticNonFinalField")
+public final class AwsV1BindingSupport {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      AwsV1BindingSupport.class);
+
+  /**
+   * V1 credential provider classname: {@code}.
+   */
+  public static final String CREDENTIAL_PROVIDER_CLASSNAME =
+      "com.amazonaws.auth.AWSCredentialsProvider";
+
+  /**
+   * SDK availability.
+   */
+  private static final boolean SDK_V1_FOUND = checkForAwsV1Sdk();
+
+  private AwsV1BindingSupport() {
+  }
+
+  /**
+   * Probe for the AWS v1 SDK being available by looking for
+   * the class {@link #CREDENTIAL_PROVIDER_CLASSNAME}.
+   * @return true if it was found in the classloader
+   */
+  private static boolean checkForAwsV1Sdk() {
+
+    try {
+      ClassLoader cl = AwsV1BindingSupport.class.getClassLoader();
+      cl.loadClass(CREDENTIAL_PROVIDER_CLASSNAME);
+      LOG.debug("v1 SDK class {} found", CREDENTIAL_PROVIDER_CLASSNAME);
+      return true;
+    } catch (Exception e) {
+      LOG.debug("v1 SDK class {} not found", CREDENTIAL_PROVIDER_CLASSNAME, e);
+      return false;
+    }
+  }
+
+  /**
+   * Is the AWS v1 SDK available?
+   * @return true if it was found in the classloader
+   */
+  public static synchronized boolean isAwsV1SdkAvailable() {
+    return SDK_V1_FOUND;
+  }
+
+
+  /**
+   * Create an AWS credential provider from its class by using reflection.  The
+   * class must implement one of the following means of construction, which are
+   * attempted in order:
+   *
+   * <ol>
+   * <li>a public constructor accepting java.net.URI and
+   *     org.apache.hadoop.conf.Configuration</li>
+   * <li>a public constructor accepting
+   *    org.apache.hadoop.conf.Configuration</li>
+   * <li>a public static method named getInstance that accepts no
+   *    arguments and returns an instance of
+   *    com.amazonaws.auth.AWSCredentialsProvider, or</li>
+   * <li>a public default constructor.</li>
+   * </ol>
+   * @param conf configuration
+   * @param className credential classname
+   * @param uri URI of the FS
+   * @param key configuration key to use
+   * @return the instantiated class
+   * @throws InstantiationIOException on any instantiation failure, including v1 SDK not found
+   * @throws IOException anything else.
+   */
+  public static AwsCredentialsProvider createAWSV1CredentialProvider(
+      Configuration conf,
+      String className,
+      @Nullable URI uri,
+      final String key) throws IOException {
+    if (!isAwsV1SdkAvailable()) {
+      throw unavailable(uri, className, key, "No AWS v1 SDK available");
+    }
+    return V1ToV2AwsCredentialProviderAdapter.create(conf, className, uri);
+  }
+}
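
A sketch of the call path for a legacy provider, where
com.example.LegacyV1Provider stands in for a hypothetical v1
AWSCredentialsProvider implementation:

import java.io.IOException;
import java.net.URI;

import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.adapter.AwsV1BindingSupport;

public class V1AdapterExample {
  static AwsCredentialsProvider adapt(Configuration conf, URI fsUri) throws IOException {
    // Throws InstantiationIOException ("unavailable") if the v1 SDK jar is absent.
    return AwsV1BindingSupport.createAWSV1CredentialProvider(
        conf,
        "com.example.LegacyV1Provider",  // hypothetical v1 provider classname
        fsUri,
        "fs.s3a.aws.credentials.provider");
  }
}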

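As an illustrative sketch only (not part of the patch): callers are expected to go through this binding class rather than reference v1 types directly. Here `conf` and `fsUri` stand for an existing Hadoop Configuration and filesystem URI, and the provider classname is hypothetical:

    // probe first; if the v1 SDK is absent, the create call throws
    // an InstantiationIOException of Kind.Unavailable
    if (AwsV1BindingSupport.isAwsV1SdkAvailable()) {
      AwsCredentialsProvider provider =
          AwsV1BindingSupport.createAWSV1CredentialProvider(
              conf,                                  // Hadoop Configuration
              "com.example.auth.LegacyV1Provider",   // hypothetical v1 classname
              fsUri,                                 // filesystem URI, may be null
              AWS_CREDENTIALS_PROVIDER);             // config key used in error text
    }
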
+ 103 - 12
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1ToV2AwsCredentialProviderAdapter.java

@@ -18,31 +18,90 @@
 
 package org.apache.hadoop.fs.s3a.adapter;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.URI;
+import javax.annotation.Nullable;
+
+import com.amazonaws.SdkClientException;
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.AWSSessionCredentials;
 import com.amazonaws.auth.AnonymousAWSCredentials;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
 import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
 import software.amazon.awssdk.auth.credentials.AwsCredentials;
 import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
 import software.amazon.awssdk.auth.credentials.AwsSessionCredentials;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3a.CredentialInitializationException;
+import org.apache.hadoop.fs.s3a.S3AUtils;
+import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
+
+import static java.util.Objects.requireNonNull;
+import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
+
 /**
  * Adapts a V1 {@link AWSCredentialsProvider} to the V2 {@link AwsCredentialsProvider} interface.
- * Implements both interfaces so can be used with either the V1 or V2 AWS SDK.
  */
-final class V1ToV2AwsCredentialProviderAdapter implements V1V2AwsCredentialProviderAdapter {
+public final class V1ToV2AwsCredentialProviderAdapter
+    implements AwsCredentialsProvider, Closeable {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      V1ToV2AwsCredentialProviderAdapter.class);
 
+  /**
+   * The V1 credential provider constructed.
+   */
   private final AWSCredentialsProvider v1CredentialsProvider;
 
+
   private V1ToV2AwsCredentialProviderAdapter(AWSCredentialsProvider v1CredentialsProvider) {
-    this.v1CredentialsProvider = v1CredentialsProvider;
+    this.v1CredentialsProvider = requireNonNull(v1CredentialsProvider);
   }
 
+
+  /**
+   * Collect v1 credentials and convert to v2.
+   * @return v2 credentials
+   * @throws CredentialInitializationException if the inner retrieval raised an exception
+   */
   @Override
   public AwsCredentials resolveCredentials() {
-    AWSCredentials toAdapt = v1CredentialsProvider.getCredentials();
+    try {
+      // get the wrapped credentials
+      AWSCredentials toAdapt = v1CredentialsProvider.getCredentials();
+      return convertToV2Credentials(toAdapt);
+    } catch (SdkClientException e) {
+      // wrap with a v2 exception so that code which adds a try/catch for v2 sdk exceptions
+      // gets a compatible exception.
+      throw new CredentialInitializationException(e.toString(), e);
+    }
+  }
+
+  /**
+   * Close the wrapped provider if it implements Closeable/AutoCloseable.
+   * @throws IOException failure
+   */
+  @Override
+  public void close() throws IOException {
+    if (v1CredentialsProvider instanceof Closeable) {
+      ((Closeable) v1CredentialsProvider).close();
+    } else if (v1CredentialsProvider instanceof AutoCloseable) {
+      S3AUtils.closeAutocloseables(LOG, (AutoCloseable) v1CredentialsProvider);
+    }
+  }
+
+  /**
+   * Convert v1 credentials to v2, including support for session and anonymous
+   * credentials.
+   * @param toAdapt credentials to adapt.
+   * @return v2 credentials.
+   */
+  static AwsCredentials convertToV2Credentials(final AWSCredentials toAdapt) {
     if (toAdapt instanceof AWSSessionCredentials) {
       return AwsSessionCredentials.create(toAdapt.getAWSAccessKeyId(),
           toAdapt.getAWSSecretKey(),
@@ -55,20 +114,52 @@ final class V1ToV2AwsCredentialProviderAdapter implements V1V2AwsCredentialProvi
   }
 
   @Override
-  public AWSCredentials getCredentials() {
-    return v1CredentialsProvider.getCredentials();
-  }
-
-  @Override
-  public void refresh() {
-    v1CredentialsProvider.refresh();
+  public String toString() {
+    return "V1ToV2AwsCredentialProviderAdapter{" +
+        "v1CredentialsProvider=" + v1CredentialsProvider +
+        '}';
   }
 
   /**
    * @param v1CredentialsProvider V1 credential provider to adapt.
    * @return A new instance of the credentials provider adapter.
    */
-  static V1ToV2AwsCredentialProviderAdapter create(AWSCredentialsProvider v1CredentialsProvider) {
+  static AwsCredentialsProvider create(AWSCredentialsProvider v1CredentialsProvider) {
     return new V1ToV2AwsCredentialProviderAdapter(v1CredentialsProvider);
   }
+
+  /**
+   * Create an AWS credential provider from its class by using reflection.  The
+   * class must implement one of the following means of construction, which are
+   * attempted in order:
+   *
+   * <ol>
+   * <li>a public constructor accepting java.net.URI and
+   *     org.apache.hadoop.conf.Configuration</li>
+   * <li>a public constructor accepting
+   *    org.apache.hadoop.conf.Configuration</li>
+   * <li>a public static method named getInstance that accepts no
+   *    arguments and returns an instance of
+   *    com.amazonaws.auth.AWSCredentialsProvider, or</li>
+   * <li>a public default constructor.</li>
+   * </ol>
+   * @param conf configuration
+   * @param className classname
+   * @param uri URI of the FS
+   * @return the instantiated class
+   * @throws InstantiationIOException on construction and instantiation failures,
+   * including v1 SDK exceptions.
+   * @throws IOException if raised by a constructor/factory method.
+   */
+  static AwsCredentialsProvider create(
+      Configuration conf,
+      String className,
+      @Nullable URI uri) throws InstantiationIOException, IOException {
+
+    final AWSCredentialsProvider instance =
+        S3AUtils.getInstanceFromReflection(className, conf, uri, AWSCredentialsProvider.class,
+            "getInstance", AWS_CREDENTIALS_PROVIDER);
+    return create(instance);
+  }
+
 }

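To make the reflection contract above concrete, here is a hypothetical v1 provider which the adapter could load through its Configuration constructor; the credentials are placeholders and the v1 SDK must be on the classpath for it to compile:

    public class LegacyConfProvider implements com.amazonaws.auth.AWSCredentialsProvider {

      public LegacyConfProvider(org.apache.hadoop.conf.Configuration conf) {
        // read any provider-specific settings from conf here
      }

      @Override
      public com.amazonaws.auth.AWSCredentials getCredentials() {
        // placeholder secrets, for illustration only
        return new com.amazonaws.auth.BasicAWSCredentials("access-key", "secret-key");
      }

      @Override
      public void refresh() {
        // no-op
      }
    }

Named in fs.s3a.aws.credentials.provider, such a class would first fail to load as a v2 provider and then be wrapped by this adapter.
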
+ 0 - 36
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1V2AwsCredentialProviderAdapter.java

@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3a.adapter;
-
-import com.amazonaws.auth.AWSCredentialsProvider;
-import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
-
-public interface V1V2AwsCredentialProviderAdapter extends AWSCredentialsProvider,
-    AwsCredentialsProvider {
-
-  /**
-   * Creates a two-way adapter from a V1 {@link AWSCredentialsProvider} interface.
-   *
-   * @param v1CredentialsProvider V1 credentials provider.
-   * @return Two-way credential provider adapter.
-   */
-  static V1V2AwsCredentialProviderAdapter adapt(AWSCredentialsProvider v1CredentialsProvider) {
-    return V1ToV2AwsCredentialProviderAdapter.create(v1CredentialsProvider);
-  }
-}

+ 4 - 0
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/package-info.java

@@ -18,6 +18,10 @@
 
 /**
  * Adapter classes for allowing V1 credential providers to be used with SDKV2.
+ * This is the only package where use of AWS v1 classes is permitted;
+ * all instantiations of objects here must use reflection to probe for
+ * availability, or be prepared to catch the exceptions which may be raised
+ * if the v1 SDK isn't found on the classpath.
  */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable

+ 5 - 1
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/api/RequestFactory.java

@@ -41,13 +41,15 @@ import software.amazon.awssdk.services.s3.model.SelectObjectContentRequest;
 import software.amazon.awssdk.services.s3.model.StorageClass;
 import software.amazon.awssdk.services.s3.model.UploadPartRequest;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
 import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
 import org.apache.hadoop.fs.s3a.impl.PutObjectOptions;
 
 /**
- * Factory for S3 objects.
+ * Factory for S3 request objects.
  *
  * This is where the owner FS's {@code prepareRequest()}
  * callback is invoked to mark up a request for this span.
@@ -61,6 +63,8 @@ import org.apache.hadoop.fs.s3a.impl.PutObjectOptions;
  * as there are no guarantees how they are processed.
  * That is: no guarantees of retry or translation.
  */
+@InterfaceStability.Unstable
+@InterfaceAudience.LimitedPrivate("testing/diagnostics")
 public interface RequestFactory {
 
   /**

+ 16 - 7
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/ActiveAuditManagerS3A.java

@@ -33,7 +33,9 @@ import software.amazon.awssdk.core.interceptor.ExecutionInterceptor;
 import software.amazon.awssdk.http.SdkHttpRequest;
 import software.amazon.awssdk.http.SdkHttpResponse;
 import software.amazon.awssdk.transfer.s3.progress.TransferListener;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -404,19 +406,26 @@ public final class ActiveAuditManagerS3A
     List<ExecutionInterceptor> executionInterceptors = new ArrayList<>();
     executionInterceptors.add(this);
 
-    final String handlers = getConfig().get(AUDIT_REQUEST_HANDLERS);
-    if (handlers != null) {
-      V2Migration.v1RequestHandlersUsed();
+    final String handlers = getConfig().getTrimmed(AUDIT_REQUEST_HANDLERS, "");
+    if (!handlers.isEmpty()) {
+      // warn and ignore v1 handlers.
+      V2Migration.v1RequestHandlersUsed(handlers);
     }
 
-    // TODO: should we remove this and use Global/Service interceptors, see:
-    //  https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/core/interceptor/ExecutionInterceptor.html
+    // V2 SDK supports global/service interceptors, but they need to be configured on the
+    // classpath and don't get the filesystem/job configuration passed down.
     final Class<?>[] interceptors = getConfig().getClasses(AUDIT_EXECUTION_INTERCEPTORS);
     if (interceptors != null) {
       for (Class<?> handler : interceptors) {
         try {
+          LOG.debug("Adding interceptor of class {}", handler);
           Constructor<?> ctor = handler.getConstructor();
-          executionInterceptors.add((ExecutionInterceptor) ctor.newInstance());
+          final ExecutionInterceptor interceptor = (ExecutionInterceptor) ctor.newInstance();
+          if (interceptor instanceof Configurable) {
+            // pass in the configuration.
+            ((Configurable) interceptor).setConf(getConfig());
+          }
+          executionInterceptors.add(interceptor);
         } catch (ExceptionInInitializerError e) {
           throw FutureIO.unwrapInnerException(e);
         } catch (Exception e) {

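Since interceptors listed under fs.s3a.audit.execution.interceptors are created through their no-arg constructor and, if they implement Configurable, handed the configuration, a custom interceptor could look like this sketch (class name and body are illustrative):

    public class AuditLoggingInterceptor
        implements software.amazon.awssdk.core.interceptor.ExecutionInterceptor,
                   org.apache.hadoop.conf.Configurable {

      private org.apache.hadoop.conf.Configuration conf;

      @Override
      public void setConf(org.apache.hadoop.conf.Configuration conf) {
        this.conf = conf;   // receives the filesystem/job configuration
      }

      @Override
      public org.apache.hadoop.conf.Configuration getConf() {
        return conf;
      }

      @Override
      public void beforeExecution(
          software.amazon.awssdk.core.interceptor.Context.BeforeExecution context,
          software.amazon.awssdk.core.interceptor.ExecutionAttributes attributes) {
        // inspect context.request() here
      }
    }
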
+ 14 - 15
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java

@@ -34,7 +34,6 @@ import software.amazon.awssdk.services.sts.StsClient;
 import software.amazon.awssdk.services.sts.auth.StsAssumeRoleCredentialsProvider;
 import software.amazon.awssdk.services.sts.model.AssumeRoleRequest;
 import software.amazon.awssdk.services.sts.model.StsException;
-import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,7 +41,9 @@ import org.slf4j.LoggerFactory;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;
 import org.apache.hadoop.fs.s3a.CredentialInitializationException;
 import org.apache.hadoop.fs.s3a.Retries;
@@ -53,7 +54,7 @@ import org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import static org.apache.hadoop.fs.s3a.Constants.*;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.buildAWSProviderList;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.buildAWSProviderList;
 
 /**
  * Support IAM Assumed roles by instantiating an instance of
@@ -66,7 +67,7 @@ import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.buildAWSPr
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class AssumedRoleCredentialProvider implements AwsCredentialsProvider,
+public final class AssumedRoleCredentialProvider implements AwsCredentialsProvider,
     Closeable {
 
   private static final Logger LOG =
@@ -106,7 +107,7 @@ public class AssumedRoleCredentialProvider implements AwsCredentialsProvider,
 
     arn = conf.getTrimmed(ASSUMED_ROLE_ARN, "");
     if (StringUtils.isEmpty(arn)) {
-      throw new IOException(E_NO_ROLE);
+      throw new PathIOException(String.valueOf(fsUri), E_NO_ROLE);
     }
 
     // build up the base provider
@@ -115,8 +116,8 @@ public class AssumedRoleCredentialProvider implements AwsCredentialsProvider,
         Arrays.asList(
             SimpleAWSCredentialsProvider.class,
             EnvironmentVariableCredentialsProvider.class),
-        Sets.newHashSet(this.getClass()));
-    LOG.debug("Credentials to obtain role credentials: {}", credentialsToSTS);
+        Sets.newHashSet(getClass()));
+    LOG.debug("Credentials used to obtain role credentials: {}", credentialsToSTS);
 
     // then the STS binding
     sessionName = conf.getTrimmed(ASSUMED_ROLE_SESSION_NAME,
@@ -170,7 +171,7 @@ public class AssumedRoleCredentialProvider implements AwsCredentialsProvider,
   @Retries.RetryRaw
   public AwsCredentials resolveCredentials() {
     try {
-      return invoker.retryUntranslated("getCredentials",
+      return invoker.retryUntranslated("resolveCredentials",
           true,
           stsProvider::resolveCredentials);
     } catch (IOException e) {
@@ -182,7 +183,7 @@ public class AssumedRoleCredentialProvider implements AwsCredentialsProvider,
           "getCredentials failed: " + e,
           e);
     } catch (SdkClientException e) {
-      LOG.error("Failed to get credentials for role {}",
+      LOG.error("Failed to resolve credentials for role {}",
           arn, e);
       throw e;
     }
@@ -198,13 +199,11 @@ public class AssumedRoleCredentialProvider implements AwsCredentialsProvider,
 
   @Override
   public String toString() {
-    final StringBuilder sb = new StringBuilder(
-        "AssumedRoleCredentialProvider{");
-    sb.append("role='").append(arn).append('\'');
-    sb.append(", session'").append(sessionName).append('\'');
-    sb.append(", duration=").append(duration);
-    sb.append('}');
-    return sb.toString();
+    return "AssumedRoleCredentialProvider{"
+        + "role='" + arn + '\''
+        + ", session='" + sessionName + '\''
+        + ", duration=" + duration
+        + '}';
   }
 
   /**

+ 0 - 283
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AwsCredentialListProvider.java

@@ -1,283 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3a.auth;
-
-import java.io.IOException;
-import java.lang.reflect.Modifier;
-import java.net.URI;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import javax.annotation.Nullable;
-
-import com.amazonaws.auth.AWSCredentialsProvider;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
-import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider;
-
-import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;
-import org.apache.hadoop.fs.s3a.Constants;
-import org.apache.hadoop.fs.s3a.S3AUtils;
-import org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider;
-import org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider;
-import org.apache.hadoop.fs.s3native.S3xLoginHelper;
-
-import static org.apache.hadoop.fs.s3a.Constants.AWS_AUTH_CLASS_PREFIX;
-import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
-
-/**
- * This class provides methods to create the list of AWS credential providers.
- */
-public final class AwsCredentialListProvider {
-
-  private AwsCredentialListProvider() {
-  }
-
-  private static final Logger LOG = LoggerFactory.getLogger(AwsCredentialListProvider.class);
-
-  public static final String NOT_AWS_PROVIDER =
-      "does not implement AWSCredentialsProvider";
-  public static final String NOT_AWS_V2_PROVIDER =
-      "does not implement AwsCredentialsProvider";
-  public static final String ABSTRACT_PROVIDER =
-      "is abstract and therefore cannot be created";
-
-  /**
-   * Error message when the AWS provider list built up contains a forbidden
-   * entry.
-   */
-  @VisibleForTesting
-  public static final String E_FORBIDDEN_AWS_PROVIDER
-      = "AWS provider class cannot be used";
-
-  /**
-   * The standard AWS provider list for AWS connections.
-   */
-  public static final List<Class<?>>
-      STANDARD_AWS_PROVIDERS = Collections.unmodifiableList(
-      Arrays.asList(
-          TemporaryAWSCredentialsProvider.class,
-          SimpleAWSCredentialsProvider.class,
-          EnvironmentVariableCredentialsProvider.class,
-          IAMInstanceCredentialsProvider.class));
-
-  /**
-   * Create the AWS credentials from the providers, the URI and
-   * the key {@link Constants#AWS_CREDENTIALS_PROVIDER} in the configuration.
-   * @param binding Binding URI -may be null
-   * @param conf filesystem configuration
-   * @return a credentials provider list
-   * @throws IOException Problems loading the providers (including reading
-   * secrets from credential files).
-   */
-  public static AWSCredentialProviderList createAWSCredentialProviderSet(
-      @Nullable URI binding,
-      Configuration conf) throws IOException {
-    // this will reject any user:secret entries in the URI
-    S3xLoginHelper.rejectSecretsInURIs(binding);
-    AWSCredentialProviderList credentials =
-        buildAWSProviderList(binding,
-            conf,
-            AWS_CREDENTIALS_PROVIDER,
-            STANDARD_AWS_PROVIDERS,
-            new HashSet<>());
-    // make sure the logging message strips out any auth details
-    LOG.debug("For URI {}, using credentials {}",
-        binding, credentials);
-    return credentials;
-  }
-
-  /**
-   * Load list of AWS credential provider/credential provider factory classes.
-   * @param conf configuration
-   * @param key key
-   * @param defaultValue list of default values
-   * @return the list of classes, possibly empty
-   * @throws IOException on a failure to load the list.
-   */
-  private static List<Class<?>> loadAWSProviderClasses(Configuration conf,
-      String key,
-      Class<?>... defaultValue) throws IOException {
-    try {
-      return Arrays.asList(conf.getClasses(key, defaultValue));
-    } catch (RuntimeException e) {
-      Throwable c = e.getCause() != null ? e.getCause() : e;
-      throw new IOException("From option " + key + ' ' + c, c);
-    }
-  }
-
-  /**
-   * Maps V1 credential providers to either their equivalent SDK V2 class or hadoop provider.
-   */
-  private static Map<String, Class> initCredentialProvidersMap() {
-    Map<String, Class> v1v2CredentialProviderMap = new HashMap<>();
-
-    v1v2CredentialProviderMap.put("EnvironmentVariableCredentialsProvider",
-        EnvironmentVariableCredentialsProvider.class);
-    v1v2CredentialProviderMap.put("EC2ContainerCredentialsProviderWrapper",
-        IAMInstanceCredentialsProvider.class);
-    v1v2CredentialProviderMap.put("InstanceProfileCredentialsProvider",
-        IAMInstanceCredentialsProvider.class);
-
-    return v1v2CredentialProviderMap;
-  }
-
-  /**
-   * Load list of AWS credential provider/credential provider factory classes;
-   * support a forbidden list to prevent loops, mandate full secrets, etc.
-   * @param binding Binding URI -may be null
-   * @param conf configuration
-   * @param key key
-   * @param forbidden a possibly empty set of forbidden classes.
-   * @param defaultValues list of default providers.
-   * @return the list of classes, possibly empty
-   * @throws IOException on a failure to load the list.
-   */
-  public static AWSCredentialProviderList buildAWSProviderList(
-      @Nullable final URI binding,
-      final Configuration conf,
-      final String key,
-      final List<Class<?>> defaultValues,
-      final Set<Class<?>> forbidden) throws IOException {
-
-    // build up the base provider
-    List<Class<?>> awsClasses = loadAWSProviderClasses(conf,
-        key,
-        defaultValues.toArray(new Class[defaultValues.size()]));
-
-    Map<String, Class> v1v2CredentialProviderMap = initCredentialProvidersMap();
-    // and if the list is empty, switch back to the defaults.
-    // this is to address the issue that configuration.getClasses()
-    // doesn't return the default if the config value is just whitespace.
-    if (awsClasses.isEmpty()) {
-      awsClasses = defaultValues;
-    }
-    // iterate through, checking for blacklists and then instantiating
-    // each provider
-    AWSCredentialProviderList providers = new AWSCredentialProviderList();
-    for (Class<?> aClass : awsClasses) {
-
-      if (forbidden.contains(aClass)) {
-        throw new IOException(E_FORBIDDEN_AWS_PROVIDER
-            + " in option " + key + ": " + aClass);
-      }
-
-      if (v1v2CredentialProviderMap.containsKey(aClass.getSimpleName()) &&
-          aClass.getName().contains(AWS_AUTH_CLASS_PREFIX)){
-        providers.add(createAWSV2CredentialProvider(conf,
-            v1v2CredentialProviderMap.get(aClass.getSimpleName()), binding));
-      } else if (AWSCredentialsProvider.class.isAssignableFrom(aClass)) {
-        providers.add(createAWSV1CredentialProvider(conf,
-            aClass, binding));
-      } else {
-        providers.add(createAWSV2CredentialProvider(conf, aClass, binding));
-      }
-
-    }
-    return providers;
-  }
-
-  /**
-   * Create an AWS credential provider from its class by using reflection.  The
-   * class must implement one of the following means of construction, which are
-   * attempted in order:
-   *
-   * <ol>
-   * <li>a public constructor accepting java.net.URI and
-   *     org.apache.hadoop.conf.Configuration</li>
-   * <li>a public constructor accepting
-   *    org.apache.hadoop.conf.Configuration</li>
-   * <li>a public static method named getInstance that accepts no
-   *    arguments and returns an instance of
-   *    com.amazonaws.auth.AWSCredentialsProvider, or</li>
-   * <li>a public default constructor.</li>
-   * </ol>
-   *
-   * @param conf configuration
-   * @param credClass credential class
-   * @param uri URI of the FS
-   * @return the instantiated class
-   * @throws IOException on any instantiation failure.
-   */
-  private static AWSCredentialsProvider createAWSV1CredentialProvider(Configuration conf,
-      Class<?> credClass, @Nullable URI uri) throws IOException {
-    AWSCredentialsProvider credentials = null;
-    String className = credClass.getName();
-    if (!AWSCredentialsProvider.class.isAssignableFrom(credClass)) {
-      throw new IOException("Class " + credClass + " " + NOT_AWS_PROVIDER);
-    }
-    if (Modifier.isAbstract(credClass.getModifiers())) {
-      throw new IOException("Class " + credClass + " " + ABSTRACT_PROVIDER);
-    }
-    LOG.debug("Credential provider class is {}", className);
-
-    credentials =
-        S3AUtils.getInstanceFromReflection(credClass, conf, uri, AWSCredentialsProvider.class,
-            "getInstance", AWS_CREDENTIALS_PROVIDER);
-    return credentials;
-
-  }
-
-  /**
-   * Create an AWS credential provider from its class by using reflection.  The
-   * class must implement one of the following means of construction, which are
-   * attempted in order:
-   *
-   * <ol>
-   * <li>a public constructor accepting java.net.URI and
-   *     org.apache.hadoop.conf.Configuration</li>
-   * <li>a public constructor accepting
-   *    org.apache.hadoop.conf.Configuration</li>
-   * <li>a public static method named getInstance that accepts no
-   *    arguments and returns an instance of
-   *    software.amazon.awssdk.auth.credentials.AwsCredentialsProvider, or</li>
-   * <li>a public default constructor.</li>
-   * </ol>
-   *
-   * @param conf configuration
-   * @param credClass credential class
-   * @param uri URI of the FS
-   * @return the instantiated class
-   * @throws IOException on any instantiation failure.
-   */
-  private static AwsCredentialsProvider createAWSV2CredentialProvider(Configuration conf,
-      Class<?> credClass, @Nullable URI uri) throws IOException {
-    AwsCredentialsProvider credentials = null;
-    String className = credClass.getName();
-    if (!AwsCredentialsProvider.class.isAssignableFrom(credClass)) {
-      throw new IOException("Class " + credClass + " " + NOT_AWS_V2_PROVIDER);
-    }
-    if (Modifier.isAbstract(credClass.getModifiers())) {
-      throw new IOException("Class " + credClass + " " + ABSTRACT_PROVIDER);
-    }
-    LOG.debug("Credential provider class is {}", className);
-    credentials =
-        S3AUtils.getInstanceFromReflection(credClass, conf, uri, AwsCredentialsProvider.class,
-            "create", AWS_CREDENTIALS_PROVIDER);
-    return credentials;
-  }
-
-}

+ 303 - 0
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/CredentialProviderListFactory.java

@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.auth;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import javax.annotation.Nullable;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider;
+import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider;
+
+import org.apache.hadoop.classification.VisibleForTesting;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;
+import org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider;
+import org.apache.hadoop.fs.s3a.Constants;
+import org.apache.hadoop.fs.s3a.S3AUtils;
+import org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider;
+import org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider;
+import org.apache.hadoop.fs.s3a.adapter.AwsV1BindingSupport;
+import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
+import org.apache.hadoop.fs.s3native.S3xLoginHelper;
+import org.apache.hadoop.fs.store.LogExactlyOnce;
+
+import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
+import static org.apache.hadoop.fs.s3a.adapter.AwsV1BindingSupport.isAwsV1SdkAvailable;
+
+/**
+ * This class provides methods to create an {@link AWSCredentialProviderList}
+ * of AWS credential providers.
+ */
+public final class CredentialProviderListFactory {
+
+  private static final Logger LOG = LoggerFactory.getLogger(CredentialProviderListFactory.class);
+
+  /**
+   * A v1 entry has been remapped. warn once about this and then shut up.
+   */
+  private static final LogExactlyOnce LOG_REMAPPED_ENTRY = new LogExactlyOnce(LOG);
+
+  /**
+   * Error message when the AWS provider list built up contains a forbidden
+   * entry.
+   */
+  @VisibleForTesting
+  public static final String E_FORBIDDEN_AWS_PROVIDER
+      = "AWS provider class cannot be used";
+
+  /**
+   * The standard AWS provider list for AWS connections.
+   */
+  public static final List<Class<?>>
+      STANDARD_AWS_PROVIDERS = Collections.unmodifiableList(
+      Arrays.asList(
+          EnvironmentVariableCredentialsProvider.class,
+          IAMInstanceCredentialsProvider.class,
+          SimpleAWSCredentialsProvider.class,
+          TemporaryAWSCredentialsProvider.class));
+
+  /** V1 credential provider: {@value}. */
+  public static final String ANONYMOUS_CREDENTIALS_V1 =
+      "com.amazonaws.auth.AnonymousAWSCredentials";
+
+  /** V1 credential provider: {@value}. */
+  public static final String EC2_CONTAINER_CREDENTIALS_V1 =
+      "com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper";
+
+  /** V1 credential provider: {@value}. */
+  public static final String EC2_IAM_CREDENTIALS_V1 =
+      "com.amazonaws.auth.InstanceProfileCredentialsProvider";
+
+  /** V2 EC2 instance/container credential provider. */
+  public static final String EC2_IAM_CREDENTIALS_V2 =
+      IAMInstanceCredentialsProvider.class.getName();
+
+  /** V1 env var credential provider: {@value}. */
+  public static final String ENVIRONMENT_CREDENTIALS_V1 =
+      "com.amazonaws.auth.EnvironmentVariableCredentialsProvider";
+
+  /** V2 environment variables credential provider. */
+  public static final String ENVIRONMENT_CREDENTIALS_V2 =
+      EnvironmentVariableCredentialsProvider.class.getName();
+
+  /** V1 profile credential provider: {@value}. */
+  public static final String PROFILE_CREDENTIALS_V1 =
+      "com.amazonaws.auth.profile.ProfileCredentialsProvider";
+
+  /** V2 environment variables credential provider. */
+  public static final String PROFILE_CREDENTIALS_V2 =
+      ProfileCredentialsProvider.class.getName();
+
+  /**
+   * Private map of v1 to v2 credential provider name mapping.
+   */
+  private static final Map<String, String> V1_V2_CREDENTIAL_PROVIDER_MAP =
+      initCredentialProvidersMap();
+
+  private CredentialProviderListFactory() {
+  }
+
+  /**
+   * Create the AWS credentials from the providers, the URI and
+   * the key {@link Constants#AWS_CREDENTIALS_PROVIDER} in the configuration.
+   * @param binding Binding URI -may be null
+   * @param conf filesystem configuration
+   * @return a credentials provider list
+   * @throws IOException Problems loading the providers (including reading
+   * secrets from credential files).
+   */
+  public static AWSCredentialProviderList createAWSCredentialProviderList(
+      @Nullable URI binding,
+      Configuration conf) throws IOException {
+    // this will reject any user:secret entries in the URI
+    S3xLoginHelper.rejectSecretsInURIs(binding);
+    AWSCredentialProviderList credentials =
+        buildAWSProviderList(binding,
+            conf,
+            AWS_CREDENTIALS_PROVIDER,
+            STANDARD_AWS_PROVIDERS,
+            new HashSet<>());
+    // make sure the logging message strips out any auth details
+    LOG.debug("For URI {}, using credentials {}",
+        binding, credentials);
+    return credentials;
+  }
+
+  /**
+   * Load list of AWS credential provider/credential provider factory classes.
+   * @param conf configuration
+   * @param key key
+   * @param defaultValue list of default values
+   * @return the list of classes, empty if the default list is empty and
+   * there was no match for the key in the configuration.
+   * @throws IOException on a failure to load the list.
+   */
+  private static Collection<String> loadAWSProviderClasses(Configuration conf,
+      String key,
+      Class<?>... defaultValue) throws IOException {
+    final Collection<String> classnames = conf.getTrimmedStringCollection(key);
+    if (classnames.isEmpty()) {
+      // empty list; return the defaults
+      return Arrays.stream(defaultValue).map(c -> c.getName()).collect(Collectors.toList());
+    } else {
+      return classnames;
+    }
+  }
+
+  /**
+   * Maps V1 credential providers to either their equivalent SDK V2 class or hadoop provider.
+   */
+  private static Map<String, String> initCredentialProvidersMap() {
+    Map<String, String> v1v2CredentialProviderMap = new HashMap<>();
+
+    v1v2CredentialProviderMap.put(ANONYMOUS_CREDENTIALS_V1,
+        AnonymousAWSCredentialsProvider.NAME);
+    v1v2CredentialProviderMap.put(EC2_CONTAINER_CREDENTIALS_V1,
+        EC2_IAM_CREDENTIALS_V2);
+    v1v2CredentialProviderMap.put(EC2_IAM_CREDENTIALS_V1,
+        EC2_IAM_CREDENTIALS_V2);
+    v1v2CredentialProviderMap.put(ENVIRONMENT_CREDENTIALS_V1,
+        ENVIRONMENT_CREDENTIALS_V2);
+    v1v2CredentialProviderMap.put(PROFILE_CREDENTIALS_V1,
+        PROFILE_CREDENTIALS_V2);
+
+    return v1v2CredentialProviderMap;
+  }
+
+  /**
+   * Load list of AWS credential provider/credential provider factory classes;
+   * support a forbidden list to prevent loops, mandate full secrets, etc.
+   * @param binding Binding URI -may be null
+   * @param conf configuration
+   * @param key configuration key to use
+   * @param forbidden a possibly empty set of forbidden classes.
+   * @param defaultValues list of default providers.
+   * @return the list of classes, possibly empty
+   * @throws IOException on a failure to load the list.
+   */
+  public static AWSCredentialProviderList buildAWSProviderList(
+      @Nullable final URI binding,
+      final Configuration conf,
+      final String key,
+      final List<Class<?>> defaultValues,
+      final Set<Class<?>> forbidden) throws IOException {
+
+    // build up the base provider
+    Collection<String> awsClasses = loadAWSProviderClasses(conf,
+        key,
+        defaultValues.toArray(new Class[defaultValues.size()]));
+
+    Map<String, String> v1v2CredentialProviderMap = V1_V2_CREDENTIAL_PROVIDER_MAP;
+    final Set<String> forbiddenClassnames =
+        forbidden.stream().map(c -> c.getName()).collect(Collectors.toSet());
+
+    // iterate through, checking for forbidden values and then instantiating
+    // each provider
+    AWSCredentialProviderList providers = new AWSCredentialProviderList();
+    for (String className : awsClasses) {
+      if (v1v2CredentialProviderMap.containsKey(className)) {
+        // mapping
+
+        final String mapped = v1v2CredentialProviderMap.get(className);
+        LOG_REMAPPED_ENTRY.warn("Credentials option {} contains AWS v1 SDK entry {}; mapping to {}",
+            key, className, mapped);
+        className = mapped;
+      }
+      // now scan the forbidden list. doing this after any mappings ensures the v1 names
+      // are also blocked
+      if (forbiddenClassnames.contains(className)) {
+        throw new InstantiationIOException(InstantiationIOException.Kind.Forbidden,
+            binding, className, key, E_FORBIDDEN_AWS_PROVIDER, null);
+      }
+
+      AwsCredentialsProvider provider;
+      try {
+        provider = createAWSV2CredentialProvider(conf, className, binding, key);
+      } catch (InstantiationIOException e) {
+        // failed to create a v2; try to see if it is a v1
+        if (e.getKind() == InstantiationIOException.Kind.IsNotImplementation) {
+          if (isAwsV1SdkAvailable()) {
+            // try to create v1
+            LOG.debug("Failed to create {} as a v2 credential provider;"
+                + " trying to instantiate it as a v1 provider", className);
+            try {
+              provider =
+                  AwsV1BindingSupport.createAWSV1CredentialProvider(conf, className, binding, key);
+              LOG_REMAPPED_ENTRY.warn("Credentials option {} contains AWS v1 SDK entry {}",
+                  key, className);
+            } catch (InstantiationIOException ex) {
+              // if it is something other than non-implementation, throw.
+              // that way, non-impl messages are about v2 not v1 in the error
+              if (ex.getKind() != InstantiationIOException.Kind.IsNotImplementation) {
+                throw ex;
+              } else {
+                throw e;
+              }
+            }
+          } else {
+            LOG.warn("Failed to instantiate {} as an AWS v2 SDK credential provider;"
+                + " the AWS v1 SDK is not on the classpath so it cannot be"
+                + " instantiated as a v1 provider", className, e);
+            throw e;
+          }
+        } else {
+          // any other problem
+          throw e;
+        }
+        LOG.debug("From provider class {} created AWS provider {}", className, provider);
+      }
+      providers.add(provider);
+    }
+    return providers;
+  }
+
+  /**
+   * Create an AWS v2 credential provider from its class by using reflection.
+   * @param conf configuration
+   * @param className credential class name
+   * @param uri URI of the FS
+   * @param key configuration key to use
+   * @return the instantiated class
+   * @throws IOException on any instantiation failure.
+   * @see S3AUtils#getInstanceFromReflection
+   */
+  private static AwsCredentialsProvider createAWSV2CredentialProvider(Configuration conf,
+      String className,
+      @Nullable URI uri, final String key) throws IOException {
+    LOG.debug("Credential provider class is {}", className);
+    return S3AUtils.getInstanceFromReflection(className, conf, uri, AwsCredentialsProvider.class,
+        "create", key);
+  }
+
+}

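To show the remapping in action, an illustrative configuration: an fs.s3a.aws.credentials.provider entry which still names the v1 environment-variable provider is translated to its v2 equivalent at load time, with a one-off warning logged:

    <property>
      <name>fs.s3a.aws.credentials.provider</name>
      <!-- v1 classname; remapped at load time to
           software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider -->
      <value>com.amazonaws.auth.EnvironmentVariableCredentialsProvider</value>
    </property>
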
+ 8 - 1
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/IAMInstanceCredentialsProvider.java

@@ -32,7 +32,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 
 /**
  * This is an IAM credential provider which wraps
- * an {@code EC2ContainerCredentialsProviderWrapper}
+ * a {@code ContainerCredentialsProvider}
  * to provide credentials when the S3A connector is instantiated on AWS EC2
  * or the AWS container services.
  * <p>
@@ -90,4 +90,11 @@ public class IAMInstanceCredentialsProvider
   public void close() throws IOException {
     // no-op.
   }
+
+  @Override
+  public String toString() {
+    return "IAMInstanceCredentialsProvider{" +
+        "containerCredentialsProvider=" + containerCredentialsProvider +
+        '}';
+  }
 }

+ 1 - 1
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/SignerFactory.java

@@ -106,7 +106,7 @@ public final class SignerFactory {
     LOG.debug("Signer class is {}", className);
 
     Signer signer =
-        S3AUtils.getInstanceFromReflection(signerClass, null, null, Signer.class, "create",
+        S3AUtils.getInstanceFromReflection(className, null, null, Signer.class, "create",
             configKey);
 
     return signer;

+ 0 - 5
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/S3ADelegationTokens.java

@@ -120,11 +120,6 @@ public class S3ADelegationTokens extends AbstractDTService {
    */
   private AbstractDelegationTokenBinding tokenBinding;
 
-  /**
-   * List of cred providers; unset until {@link #bindToDelegationToken(Token)}.
-   */
-  //private Optional<AWSCredentialProviderList> credentialProviders = Optional.empty();
-
   /**
    * delegation binding information; unset until {@link #bindToDelegationToken(Token)}.
    */

+ 23 - 8
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/SessionTokenBinding.java

@@ -47,8 +47,8 @@ import org.apache.hadoop.io.Text;
 
 import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
 import static org.apache.hadoop.fs.s3a.Invoker.once;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.STANDARD_AWS_PROVIDERS;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.buildAWSProviderList;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.STANDARD_AWS_PROVIDERS;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.buildAWSProviderList;
 import static org.apache.hadoop.fs.s3a.auth.MarshalledCredentialBinding.fromAWSCredentials;
 import static org.apache.hadoop.fs.s3a.auth.MarshalledCredentialBinding.fromSTSCredentials;
 import static org.apache.hadoop.fs.s3a.auth.delegation.DelegationConstants.*;
@@ -102,7 +102,8 @@ public class SessionTokenBinding extends AbstractDelegationTokenBinding {
   private boolean hasSessionCreds;
 
   /**
-   * The auth chain for the parent options.
+   * The parent authentication chain: that used to request
+   * session/role credentials when deployed unbonded.
    */
   private AWSCredentialProviderList parentAuthChain;
 
@@ -161,12 +162,14 @@ public class SessionTokenBinding extends AbstractDelegationTokenBinding {
         DEFAULT_DELEGATION_TOKEN_REGION);
 
     // create the provider set for session credentials.
-    parentAuthChain = buildAWSProviderList(
+    final AWSCredentialProviderList chain = buildAWSProviderList(
         getCanonicalUri(),
         conf,
         AWS_CREDENTIALS_PROVIDER,
         STANDARD_AWS_PROVIDERS,
         new HashSet<>());
+    LOG.debug("Setting parent authentication chain to {}", chain);
+    setParentAuthChain(chain);
   }
 
   @Override
@@ -189,7 +192,7 @@ public class SessionTokenBinding extends AbstractDelegationTokenBinding {
   public AWSCredentialProviderList deployUnbonded()
       throws IOException {
     requireServiceStarted();
-    return parentAuthChain;
+    return getParentAuthChain();
   }
 
   /**
@@ -291,7 +294,7 @@ public class SessionTokenBinding extends AbstractDelegationTokenBinding {
     // throw this.
     final AwsCredentials parentCredentials = once("get credentials",
         "",
-        () -> parentAuthChain.resolveCredentials());
+        () -> getParentAuthChain().resolveCredentials());
     hasSessionCreds = parentCredentials instanceof AwsSessionCredentials;
 
     if (!hasSessionCreds) {
@@ -300,7 +303,7 @@ public class SessionTokenBinding extends AbstractDelegationTokenBinding {
       invoker = new Invoker(new S3ARetryPolicy(conf), LOG_EVENT);
 
       StsClient tokenService =
-          STSClientFactory.builder(parentAuthChain,
+          STSClientFactory.builder(getParentAuthChain(),
               conf,
               endpoint,
               region,
@@ -371,7 +374,7 @@ public class SessionTokenBinding extends AbstractDelegationTokenBinding {
       }
       origin += " " + CREDENTIALS_CONVERTED_TO_DELEGATION_TOKEN;
       final AwsCredentials awsCredentials
-          = parentAuthChain.resolveCredentials();
+          = getParentAuthChain().resolveCredentials();
       if (awsCredentials instanceof AwsSessionCredentials) {
         marshalledCredentials = fromAWSCredentials(
             (AwsSessionCredentials) awsCredentials);
@@ -421,4 +424,16 @@ public class SessionTokenBinding extends AbstractDelegationTokenBinding {
       tokenIdentifier) {
     this.tokenIdentifier = tokenIdentifier;
   }
+
+  /**
+   * The auth chain for the parent options.
+   * @return the parent authentication chain.
+   */
+  protected AWSCredentialProviderList getParentAuthChain() {
+    return parentAuthChain;
+  }
+
+  protected void setParentAuthChain(AWSCredentialProviderList parentAuthChain) {
+    this.parentAuthChain = parentAuthChain;
+  }
 }

+ 1 - 1
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSClientConfig.java

@@ -348,7 +348,7 @@ public final class AWSClientConfig {
     if (configKey != null) {
       String signerOverride = conf.getTrimmed(configKey, "");
       if (!signerOverride.isEmpty()) {
-        LOG.debug("Signer override for {}} = {}", awsServiceIdentifier, signerOverride);
+        LOG.debug("Signer override for {} = {}", awsServiceIdentifier, signerOverride);
         clientConfig.putAdvancedOption(SdkAdvancedClientOption.SIGNER,
             SignerFactory.createSigner(signerOverride, configKey));
       }

+ 9 - 9
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSHeaders.java

@@ -24,7 +24,7 @@ package org.apache.hadoop.fs.s3a.impl;
 public interface AWSHeaders {
 
   /*
-   * Standard HTTP Headers
+   * Standard HTTP Headers.
    */
 
   String CACHE_CONTROL = "Cache-Control";
@@ -40,22 +40,22 @@ public interface AWSHeaders {
   String LAST_MODIFIED = "Last-Modified";
 
   /*
-   * Amazon HTTP Headers used by S3A
+   * Amazon HTTP Headers used by S3A.
    */
 
-  /** S3's version ID header */
+  /** S3's version ID header. */
   String S3_VERSION_ID = "x-amz-version-id";
 
-  /** Header describing what class of storage a user wants */
+  /** Header describing what class of storage a user wants. */
   String STORAGE_CLASS = "x-amz-storage-class";
 
-  /** Header describing what archive tier the object is in, if any */
+  /** Header describing what archive tier the object is in, if any. */
   String ARCHIVE_STATUS = "x-amz-archive-status";
 
-  /** Header for optional server-side encryption algorithm */
+  /** Header for optional server-side encryption algorithm. */
   String SERVER_SIDE_ENCRYPTION = "x-amz-server-side-encryption";
 
-  /** Range header for the get object request */
+  /** Range header for the get object request. */
   String RANGE = "Range";
 
   /**
@@ -65,10 +65,10 @@ public interface AWSHeaders {
   @Deprecated
   String CRYPTO_KEY = "x-amz-key";
 
-  /** JSON-encoded description of encryption materials used during encryption */
+  /** JSON-encoded description of encryption materials used during encryption. */
   String MATERIALS_DESCRIPTION = "x-amz-matdesc";
 
-  /** Header for the optional restore information of an object */
+  /** Header for the optional restore information of an object. */
   String RESTORE = "x-amz-restore";
 
   /**

+ 180 - 0
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/InstantiationIOException.java

@@ -0,0 +1,180 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.impl;
+
+import java.net.URI;
+
+import javax.annotation.Nullable;
+
+import org.apache.hadoop.fs.PathIOException;
+
+/**
+ * An instantiation exception raised during reflection-based creation
+ * of classes.
+ * Uses an enum of kind so tests/code can examine it, without
+ * creating a full hierarchy of exception classes.
+ */
+public class InstantiationIOException extends PathIOException {
+
+  public static final String ABSTRACT_PROVIDER =
+      "is abstract and therefore cannot be created";
+
+  public static final String CONSTRUCTOR_EXCEPTION = "constructor exception";
+
+  public static final String INSTANTIATION_EXCEPTION
+      = "instantiation exception";
+
+  public static final String DOES_NOT_IMPLEMENT
+      = "does not implement";
+
+  /**
+   * Exception kind.
+   */
+  private final Kind kind;
+
+  /**
+   * Class being instantiated.
+   */
+  private final String classname;
+
+  /**
+   * Configuration key used.
+   */
+  private final String key;
+
+  /**
+   * An (extensible) enum of kinds of instantiation failure.
+   */
+  public enum Kind {
+    Forbidden,
+    InstantiationFailure,
+    IsAbstract,
+    IsNotImplementation,
+    Other,
+    Unavailable,
+    UnsupportedConstructor,
+  }
+
+  public InstantiationIOException(
+      Kind kind,
+      @Nullable URI uri, String classname,
+      @Nullable String key,
+      String message,
+      Throwable cause) {
+    super(uri != null ? uri.toString() : "",
+        "Class " + classname + " " + message
+            + (key != null ? (" (configuration key " + key + ")") : ""),
+        cause);
+    this.kind = kind;
+    this.classname = classname;
+    this.key = key;
+  }
+
+  public String getClassname() {
+    return classname;
+  }
+
+  public Kind getKind() {
+    return kind;
+  }
+
+  public String getKey() {
+    return key;
+  }
+
+  /**
+   * Class is abstract.
+   * @param uri URI of filesystem
+   * @param classname classname.
+   * @param key configuration key
+   * @return an exception.
+   */
+  public static InstantiationIOException isAbstract(URI uri, String classname, String key) {
+    return new InstantiationIOException(Kind.IsAbstract,
+        uri, classname, key, ABSTRACT_PROVIDER, null);
+  }
+
+  /**
+   * Class does not implement the desired interface.
+   * @param uri URI of filesystem
+   * @param classname classname.
+   * @param interfaceName required interface
+   * @param key configuration key
+   * @return an exception.
+   */
+  public static InstantiationIOException isNotInstanceOf(
+      @Nullable URI uri,
+      String classname,
+      String interfaceName,
+      String key) {
+    return new InstantiationIOException(Kind.IsNotImplementation, uri, classname,
+        key, DOES_NOT_IMPLEMENT + " " + interfaceName, null);
+  }
+
+  /**
+   * Class is unavailable for some reason, probably a missing dependency.
+   * @param uri URI of filesystem
+   * @param classname classname.
+   * @param key configuration key
+   * @param text text to include
+   * @return an exception.
+   */
+  public static InstantiationIOException unavailable(
+      @Nullable URI uri,
+      String classname,
+      String key,
+      String text) {
+    return new InstantiationIOException(Kind.Unavailable,
+        uri, classname, key, text, null);
+  }
+
+  /**
+   * Failure to find a valid constructor (signature, visibility) or
+   * factory method.
+   * @param uri URI of filesystem
+   * @param classname classname.
+   * @param key configuration key
+   * @return an exception.
+   */
+  public static InstantiationIOException unsupportedConstructor(
+      @Nullable URI uri,
+      String classname,
+      String key) {
+    return new InstantiationIOException(Kind.UnsupportedConstructor,
+        uri, classname, key, CONSTRUCTOR_EXCEPTION, null);
+  }
+
+  /**
+   * General instantiation failure.
+   * @param uri URI of filesystem
+   * @param classname classname.
+   * @param key configuration key
+   * @param t thrown
+   * @return an exception.
+   */
+  public static InstantiationIOException instantiationException(
+      @Nullable URI uri,
+      String classname,
+      String key,
+      Throwable t) {
+    return new InstantiationIOException(Kind.InstantiationFailure,
+        uri, classname, key, INSTANTIATION_EXCEPTION + " " + t, t);
+  }
+
+}

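A sketch of how calling code or tests can branch on the kind enum rather than on an exception class hierarchy; the surrounding variables (uri, conf, key, defaults, forbidden) are assumed to exist:

    try {
      AWSCredentialProviderList providers =
          CredentialProviderListFactory.buildAWSProviderList(
              uri, conf, key, defaults, forbidden);
    } catch (InstantiationIOException e) {
      if (e.getKind() == InstantiationIOException.Kind.Unavailable) {
        // e.g. a v1 provider was named but the v1 SDK is not on the classpath
      }
      throw e;
    }
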
+ 16 - 64
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/V2Migration.java

@@ -23,11 +23,22 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.fs.store.LogExactlyOnce;
 
+import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.AUDIT_REQUEST_HANDLERS;
 import static org.apache.hadoop.fs.s3a.impl.InternalConstants.SDK_V2_UPGRADE_LOG_NAME;
 
 /**
  * This class provides utility methods required for migrating S3A to AWS Java SDK V2.
  * For more information on the upgrade, see HADOOP-18073.
+ *
+ * <p>
+ * In HADOOP-18382, "Upgrade AWS SDK to V2 - Prerequisites",
+ * this class contained a series of {@code LogExactlyOnce} loggers to warn on
+ * the first use of a feature which would change incompatibly; this shipped in Hadoop 3.3.5.
+ * <p>
+ * With the move to v2 completed, attempts to use the v1 classes will fail,
+ * except for the special case of support for v1 credential providers.
+ * <p>
+ * The warning methods are still present where appropriate, but downgraded to debug
+ * and only retained for diagnosing migration issues.
  */
 public final class V2Migration {
 
@@ -35,76 +46,17 @@ public final class V2Migration {
 
   public static final Logger SDK_V2_UPGRADE_LOG = LoggerFactory.getLogger(SDK_V2_UPGRADE_LOG_NAME);
 
-  private static final LogExactlyOnce WARN_ON_DELEGATION_TOKENS =
-      new LogExactlyOnce(SDK_V2_UPGRADE_LOG);
-
-  private static final LogExactlyOnce WARN_ON_GET_S3_CLIENT =
-      new LogExactlyOnce(SDK_V2_UPGRADE_LOG);
-
-  private static final LogExactlyOnce WARN_OF_DIRECTLY_REFERENCED_CREDENTIAL_PROVIDER =
-      new LogExactlyOnce(SDK_V2_UPGRADE_LOG);
-
-  private static final LogExactlyOnce WARN_OF_CUSTOM_SIGNER =
-      new LogExactlyOnce(SDK_V2_UPGRADE_LOG);
-
   private static final LogExactlyOnce WARN_OF_REQUEST_HANDLERS =
       new LogExactlyOnce(SDK_V2_UPGRADE_LOG);
 
-  private static final LogExactlyOnce WARN_ON_GET_OBJECT_METADATA =
-      new LogExactlyOnce(SDK_V2_UPGRADE_LOG);
-
-  /**
-   * Warns on an AWS V1 credential provider being referenced directly.
-   * @param name name of the credential provider
-   */
-  public static void v1ProviderReferenced(String name) {
-    WARN_OF_DIRECTLY_REFERENCED_CREDENTIAL_PROVIDER.warn(
-        "Directly referencing AWS SDK V1 credential provider {}. AWS SDK V1 credential "
-            + "providers will be removed once S3A is upgraded to SDK V2", name);
-  }
-
   /**
-   * Warns on the v1 s3 client being requested.
+   * Notes use of request handlers.
+   * @param handlers handlers declared
    */
-  public static void v1S3ClientRequested() {
-    WARN_ON_GET_S3_CLIENT.warn(
-        "getAmazonS3ClientForTesting() will be removed as part of upgrading S3A to AWS SDK V2");
-  }
-
-  /**
-   * Warns when v1 credential providers are used with delegation tokens.
-   */
-  public static void v1DelegationTokenCredentialProvidersUsed() {
-    WARN_ON_DELEGATION_TOKENS.warn(
-        "The credential provider interface has changed in AWS SDK V2, custom credential "
-            + "providers used in delegation tokens binding classes will need to be updated once "
-            + "S3A is upgraded to SDK V2");
-  }
-
-  /**
-   * Warns on use of custom signers.
-   */
-  public static void v1CustomSignerUsed() {
-    WARN_OF_CUSTOM_SIGNER.warn(
-        "The signer interface has changed in AWS SDK V2, custom signers will need to be updated "
-            + "once S3A is upgraded to SDK V2");
-  }
-
-  /**
-   * Warns on use of request handlers.
-   */
-  public static void v1RequestHandlersUsed() {
+  public static void v1RequestHandlersUsed(final String handlers) {
     WARN_OF_REQUEST_HANDLERS.warn(
-        "The request handler interface has changed in AWS SDK V2, use exception interceptors "
-            + "once S3A is upgraded to SDK V2");
-  }
-
-  /**
-   * Warns on use of getObjectMetadata.
-   */
-  public static void v1GetObjectMetadataCalled() {
-    WARN_ON_GET_OBJECT_METADATA.warn("getObjectMetadata() called. This operation and it's response "
-        + "will be changed as part of upgrading S3A to AWS SDK V2");
+        "Ignoring V1 SDK request handlers set in {}: {}",
+        AUDIT_REQUEST_HANDLERS, handlers);
   }
 
 }

+ 1 - 1
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/assumed_roles.md

@@ -195,7 +195,7 @@ Here are the full set of configuration options.
 <property>
   <name>fs.s3a.assumed.role.credentials.provider</name>
   <value>org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,
-    com.amazonaws.auth.EnvironmentVariableCredentialsProvider
+    software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider
   </value>
   <description>
     List of credential providers to authenticate with the STS endpoint and

+ 39 - 9
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/auditing.md

@@ -22,7 +22,7 @@ and inside the AWS S3 SDK, immediately before the request is executed.
 The full architecture is covered in [Auditing Architecture](auditing_architecture.html);
 this document covers its use.
 
-## Important: Auditing is disabled by default
+## Important: Auditing is now enabled by default
 
 Due to a memory leak from the use of `ThreadLocal` fields, this auditing feature
 leaked memory as S3A filesystem instances were created and deleted.
@@ -32,7 +32,7 @@ See [HADOOP-18091](https://issues.apache.org/jira/browse/HADOOP-18091) _S3A audi
 
 To avoid these memory leaks, auditing was disabled by default in the hadoop 3.3.2 release.
 
-As these memory leaks have now been fixed, auditing has been re-enabled.
+As these memory leaks have now been fixed, auditing has been re-enabled in Hadoop 3.3.5 and later.
 
 To disable it, set `fs.s3a.audit.enabled` to `false`.
 
@@ -77,7 +77,7 @@ ideally even identifying the process/job generating load.
 
 ## Using Auditing
 
-Auditing is disabled by default.
+Auditing is enabled by default.
 When auditing is enabled, a Logging Auditor will annotate the S3 logs through a custom
 HTTP Referrer header in requests made to S3.
 Other auditor classes may be used instead.
@@ -88,7 +88,7 @@ Other auditor classes may be used instead.
 |--------|---------|---------------|
 | `fs.s3a.audit.enabled` | Is auditing enabled? | `true` |
 | `fs.s3a.audit.service.classname` | Auditor classname | `org.apache.hadoop.fs.s3a.audit.impl.LoggingAuditor` |
-| `fs.s3a.audit.request.handlers` | List of extra subclasses of AWS SDK RequestHandler2 to include in handler chain | `""` |
+| `fs.s3a.audit.execution.interceptors` | Implementations of AWS v2 SDK `ExecutionInterceptor` to include in handler chain | `""` |
 | `fs.s3a.audit.referrer.enabled` | Logging auditor to publish the audit information in the HTTP Referrer header | `true` |
 | `fs.s3a.audit.referrer.filter` | List of audit fields to filter | `""` |
 | `fs.s3a.audit.reject.out.of.span.operations` | Auditor to reject operations "outside of a span" | `false` |
@@ -96,14 +96,14 @@ Other auditor classes may be used instead.
 
 ### Disabling Auditing.
 
-In this release of Hadoop, auditing is disabled.
+In this release of Hadoop, auditing is enabled by default.
 
 This can be explicitly set globally or for specific buckets
 
 ```xml
 <property>
   <name>fs.s3a.audit.enabled</name>
-  <value>false</value>
+  <value>true</value>
 </property>
 ```
 
@@ -162,6 +162,26 @@ correlate access by S3 clients to the actual operations taking place.
 Note: this logging is described as "Best Effort". There's no guarantee as to
 when logs arrive.
 
+### Integration with AWS SDK request processing
+
+The auditing component inserts itself into the AWS SDK request processing
+code, so it can attach the referrer header.
+
+It is possible to declare extra classes to add to the processing chain,
+all of which must implement the interface `software.amazon.awssdk.core.interceptor.ExecutionInterceptor`.
+
+The list of classes is set in the configuration option `fs.s3a.audit.execution.interceptors`.
+
+Any class in the list which implements `org.apache.hadoop.conf.Configurable` will have
+`Configurable.setConf()` called with the filesystem configuration passed down.
+
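+As an illustration, here is a minimal sketch of such an interceptor;
+the package, class name and counter are hypothetical, not part of the S3A code.
+It implements `Configurable`, so it is handed the filesystem configuration:
+
+```java
+package org.example.audit;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import software.amazon.awssdk.core.interceptor.Context;
+import software.amazon.awssdk.core.interceptor.ExecutionAttributes;
+import software.amazon.awssdk.core.interceptor.ExecutionInterceptor;
+
+public class CountingInterceptor implements ExecutionInterceptor, Configurable {
+
+  private Configuration conf;
+  private final AtomicLong requests = new AtomicLong();
+
+  @Override
+  public void beforeExecution(Context.BeforeExecution context,
+      ExecutionAttributes executionAttributes) {
+    // invoked before every SDK request; count it
+    requests.incrementAndGet();
+  }
+
+  @Override
+  public void setConf(Configuration conf) {
+    // called with the filesystem configuration during auditor setup
+    this.conf = conf;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+}
+```
+
+It would then be registered by listing its classname:
+
+```xml
+<property>
+  <name>fs.s3a.audit.execution.interceptors</name>
+  <value>org.example.audit.CountingInterceptor</value>
+</property>
+```
+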
+Before the upgrade to the V2 SDK, a list of extra subclasses of the AWS SDK `com.amazonaws.handlers.RequestHandler2`
+class could be declared in the option `fs.s3a.audit.request.handlers`;
+these would be wired up into the V1 request processing pipeline.
+
+This option is now ignored completely, other than printing a warning message the first time a filesystem is created with a non-empty value.
+
+
 ### Rejecting out-of-span operations
 
 The logging auditor can be configured to raise an exception whenever
@@ -201,7 +221,7 @@ The HTTP referrer header is attached by the logging auditor.
 If the S3 Bucket is configured to log requests to another bucket, then these logs
 entries will include the audit information _as the referrer_.
 
-This can be parsed (consult AWS documentation for a regular expression)
+The S3 Server log entries can be parsed (consult AWS documentation for a regular expression)
 and the http referrer header extracted.
 
 ```
@@ -242,13 +262,15 @@ If any of the field values were `null`, the field is omitted.
 
 _Notes_
 
-* Thread IDs are from the current thread in the JVM, so can be compared to those in`````````
+* Thread IDs are from the current thread in the JVM, so can be compared to those in
   Log4J logs. They are never unique.
 * Task Attempt/Job IDs are only ever set during operations involving the S3A committers, specifically
-  all operations excecuted by the committer.
+  all operations executed by the committer.
   Operations executed in the same thread as the committer's instantiation _may_ also report the
   IDs, even if they are unrelated to the actual task. Consider them "best effort".
 
+Thread IDs are generated as follows:
+
 ```java
 Long.toString(Thread.currentThread().getId())
 ```
@@ -269,6 +291,8 @@ This is why the span ID is always passed in as part of the URL,
 rather than just an HTTP query parameter: even if
 the header is chopped, the span ID will always be present.
 
+As of August 2023, this header is not collected in AWS CloudTrail, only in S3 Server logs.
+
 ## Privacy Implications of HTTP Referrer auditing
 
 When the S3A client makes requests of an S3 bucket, the auditor
@@ -423,6 +447,12 @@ log4j.logger.org.apache.hadoop.fs.s3a.audit=TRACE
 
 This is very noisy and not recommended in normal operation.
 
+If logging of HTTP IO is enabled, the "referer" header is printed as part of every request:
+
+```
+log4j.logger.org.apache.http=DEBUG
+log4j.logger.software.amazon.awssdk.thirdparty.org.apache.http.client.HttpClient=DEBUG
+```
+
 ## Integration with S3A Committers
 
 Work submitted through the S3A committer will have the job (query) ID associated

+ 323 - 18
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/aws_sdk_upgrade.md

@@ -20,28 +20,42 @@ This work is tracked in [HADOOP-18073](https://issues.apache.org/jira/browse/HAD
 ## Why the upgrade?
 
 - Moving to SDK V2 will provide performance benefits.
-For example, the [transfer manager for SDKV2](https://aws.amazon.com/blogs/developer/introducing-amazon-s3-transfer-manager-in-the-aws-sdk-for-java-2-x/)
+For example, the [transfer manager for SDK V2](https://aws.amazon.com/blogs/developer/introducing-amazon-s3-transfer-manager-in-the-aws-sdk-for-java-2-x/)
 is built using java bindings of the AWS Common Runtime S3
 client (https://github.com/awslabs/aws-crt-java) (CRT).
 CRT is a set of packages written in C, designed for maximising performance when interacting with AWS
 services such as S3.
+- The V1 SDK is essentially in maintenance mode.
 - New features such as [additional checksum algorithms](https://aws.amazon.com/blogs/aws/new-additional-checksum-algorithms-for-amazon-s3/)
-which S3A will benefit from are not available in SDKV1.
+which S3A will benefit from are not available in SDK V1.
 
 ## What's changing?
 
 The [SDK V2](https://github.com/aws/aws-sdk-java-v2) for S3 is very different from
 [SDK V1](https://github.com/aws/aws-sdk-java), and brings breaking changes for S3A.
-A complete list of the changes can be found in the [Changelog](https://github.com/aws/aws-sdk-java-v2/blob/master/docs/LaunchChangelog.md#41-s3-changes).
+A complete list of the changes can be found in the
+[Changelog](https://github.com/aws/aws-sdk-java-v2/blob/master/docs/LaunchChangelog.md#41-s3-changes).
 
-The major changes and how this affects S3A are listed below.
+## Packaging: `aws-java-sdk-bundle-1.12.x.jar` becomes `bundle-2.x.y.jar`
 
-### Package Change
+As the AWS module name is no longer part of the artifact name, Hadoop
+releases now include a large JAR file simply called "bundle" in the
+distribution: this is the shaded AWS V2 SDK artifact.
 
-Package names have changed, all classes in SDK V2 are under `software.amazon.awssdk`, SDK V1 classes
-were under `com.amazonaws`.
+The new and old SDKs can co-exist; the only place that the hadoop code
+may still use the original SDK is when a non-standard V1 AWS credential
+provider is declared.
+
+Any deployment of the S3A connector must include this JAR or
+the subset of non-shaded aws- JARs needed for communication
+with S3 and any other services used.
+As before: the exact set of dependencies used by the S3A connector
+is not defined, nor does it come with any commitments to the stability
+or compatibility of those dependent libraries.
 
-### Credential Providers
+
+
+## Credential Provider changes and migration
 
 - Interface change: [com.amazonaws.auth.AWSCredentialsProvider](https://github.com/aws/aws-sdk-java/blob/master/aws-java-sdk-core/src/main/java/com/amazonaws/auth/AWSCredentialsProvider.java)
 has been replaced by [software.amazon.awssdk.auth.credentials.AwsCredentialsProvider](https://github.com/aws/aws-sdk-java-v2/blob/master/core/auth/src/main/java/software/amazon/awssdk/auth/credentials/AwsCredentialsProvider.java).
@@ -49,23 +63,296 @@ has been replaced by [software.amazon.awssdk.auth.credentials.AwsCredentialsProv
 changed.
 
 The change in interface will mean that custom credential providers will need to be updated to now
-implement `AwsCredentialsProvider` instead of `AWSCredentialProvider`.
+implement `software.amazon.awssdk.auth.credentials.AwsCredentialsProvider` instead of
+`com.amazonaws.auth.AWSCredentialsProvider`.
 
-Due to change in class names, references to SDK V1 credential providers
-in `fs.s3a.aws.credentials.provider` will need to be updated to reference V2 providers.
+### Original V1 `AWSCredentialsProvider` interface
 
-### Delegation Tokens
+Note how the interface begins with the capitalized "AWS" acronym.
+The V2 interface starts with "Aws". This is a very subtle change
+for developers to spot.
+Compilers _will_ detect and report the type mismatch.
+
+
+```java
+package com.amazonaws.auth;
+
+public interface AWSCredentialsProvider {
+
+    public AWSCredentials getCredentials();
+
+    public void refresh();
+
+}
+
+```
+The interface binding also supported a static factory method, `AWSCredentialsProvider getInstance()`, which,
+if available, would be invoked in preference to using any constructor.
+
+If the provider implemented `Closeable` or `AutoCloseable`, its `close()` method would
+be invoked when the provider chain was shut down.
+
+### New V2 `AwsCredentialsProvider` interface
+
+```java
+package software.amazon.awssdk.auth.credentials;
+
+public interface AwsCredentialsProvider {
+
+  AwsCredentials resolveCredentials();
+
+}
+```
+
+1. There is no `refresh()` method any more.
+2. `getCredentials()` has become `resolveCredentials()`.
+3. There is now the expectation in the SDK that credential resolution/lookup etc will be
+   performed in `resolveCredentials()`.
+4. If the provider implements `Closeable` or `AutoCloseable`, it will
+   be closed when the provider chain is being shut down.
+5. A static factory method `create()` returning an `AwsCredentialsProvider` or subclass
+   will be used in preference to a constructor.
+
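+As a sketch, a provider migrated to the V2 interface can look like this;
+the package, class and environment variable names below are purely illustrative:
+
+```java
+package org.example.auth;
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+
+public final class ExampleCredentialsProvider implements AwsCredentialsProvider {
+
+  /** Factory method, used in preference to a constructor. */
+  public static ExampleCredentialsProvider create() {
+    return new ExampleCredentialsProvider();
+  }
+
+  private ExampleCredentialsProvider() {
+  }
+
+  @Override
+  public AwsCredentials resolveCredentials() {
+    // credential lookup happens here, not in the constructor
+    return AwsBasicCredentials.create(
+        System.getenv("EXAMPLE_ACCESS_KEY"),
+        System.getenv("EXAMPLE_SECRET_KEY"));
+  }
+}
+```
+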
+### S3A `AWSCredentialProviderList` is now a V2 credential provider
+
+The class `org.apache.hadoop.fs.s3a.AWSCredentialProviderList` has moved from
+being a V1 to a V2 credential provider; even if an instance can be created with
+existing code, the V1 methods will not resolve:
+
+```
+java.lang.NoSuchMethodError: org.apache.hadoop.fs.s3a.AWSCredentialProviderList.getCredentials()Lcom/amazonaws/auth/AWSCredentials;
+  at org.apache.hadoop.fs.store.diag.S3ADiagnosticsInfo.validateFilesystem(S3ADiagnosticsInfo.java:903)
+```
+
+### Migration of Credential Providers listed in `fs.s3a.aws.credentials.provider`
+
+
+Before the upgrade, `fs.s3a.aws.credentials.provider` took a list of V1 credential providers,
+containing any of the following:
+1. V1 credential providers implemented in the `hadoop-aws` module.
+2. V1 credential providers implemented in the `aws-sdk-bundle` library.
+3. Custom V1 credential providers placed onto the classpath.
+4. Custom subclasses of hadoop-aws credential providers.
+
+Here is how each category changes:
+1. All `hadoop-aws` credential providers migrated to V2.
+2. Well-known `aws-sdk-bundle` credential providers _automatically remapped_ to their V2 equivalents.
+3. Custom v1 providers supported if the original `aws-sdk-bundle` JAR is on the classpath.
+4. Custom subclasses of hadoop-aws credential providers need manual migration.
+
+Because of (1) and (2), standard `fs.s3a.aws.credentials.provider` configurations
+should upgrade seamlessly. This also means that the same provider list, if restricted to
+those classes, will work across versions.
+
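+For example, the following list mixes `hadoop-aws` providers (category 1)
+with a well-known V1 provider (category 2); releases with the V2 SDK
+automatically remap the V1 classname, so the same configuration works across versions:
+
+```xml
+<property>
+  <name>fs.s3a.aws.credentials.provider</name>
+  <value>
+    org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider,
+    org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,
+    com.amazonaws.auth.EnvironmentVariableCredentialsProvider,
+    org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider
+  </value>
+</property>
+```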
+
+
+### `hadoop-aws` credential provider migration to V2
+
+All the `org.apache.hadoop.fs.s3a` credential providers have the same names and functionality as before.
+
+| Hadoop module credential provider                              | Authentication Mechanism                         |
+|----------------------------------------------------------------|--------------------------------------------------|
+| `org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider`     | Session Credentials in configuration             |
+| `org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider`        | Simple name/secret credentials in configuration  |
+| `org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider`     | Anonymous Login                                  |
+| `org.apache.hadoop.fs.s3a.auth.AssumedRoleCredentialProvider`  | [Assumed Role credentials](./assumed_roles.html) |
+| `org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider` | EC2/k8s instance credentials                     |
+
+### Automatic `aws-sdk-bundle` credential provider remapping
+
+The commonly-used set of V1 credential providers are automatically remapped to V2 equivalents.
+
+
+
+| V1 Credential Provider                                      | Remapped V2 substitute                                                           |
+|-------------------------------------------------------------|----------------------------------------------------------------------------------|
+| `com.amazonaws.auth.AnonymousAWSCredentials`                | `org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider`                       |
+| `com.amazonaws.auth.EnvironmentVariableCredentialsProvider` | `software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider` |
+| `com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper` | `org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider`                   |
+| `com.amazonaws.auth.InstanceProfileCredentialsProvider`     | `org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider`                   |
+| `com.amazonaws.auth.profile.ProfileCredentialsProvider`     | `software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider`             |
+
+There are still a number of trouble spots here:
+
+#### Less widely used `com.amazonaws.auth.` providers
+
+There should be equivalents in the new SDK, but as well as being renamed
+they are likely to have moved to different factory/builder mechanisms.
+Identify the changed classes and use their
+names in the `fs.s3a.aws.credentials.provider` option.
+
+If a V2 equivalent is not found then, provided the V1 SDK is added to the classpath,
+it should still be possible to use the existing classes.
+
+
+#### Private/third-party credential providers
+
+Provided the V1 SDK is added to the classpath,
+it should still be possible to use the existing classes.
+
+Adding a V2 equivalent is the recommended long-term solution.
+
+#### Custom subclasses of the Hadoop credential providers
 
-Custom credential providers used in delegation token binding classes will also need to be updated.
+Because all the standard hadoop credential providers have been upgraded,
+any subclasses of these are not going to link or work.
 
-### AmazonS3 replaced by S3Client
+These will need to be manually migrated to V2 credential providers.
 
-The s3 client is an instance of `S3Client` in V2 rather than `AmazonS3`.
 
-For this reason, the `S3ClientFactory` will be deprecated and replaced by one that creates a V2
-`S3Client`.
+## Source code/binary integration changes
+
+The major changes and how they affect S3A are listed below.
+
+### SDK API Package Change
+
+* Package names have changed, all classes in SDK V2 are under `software.amazon.awssdk`, SDK V1 classes
+were under `com.amazonaws`.
+* There is no interoperability between the old and new classes.
+* All classnames are different, often in very subtle ways. It is possible to use both in the same
+  class, as is done in the package `org.apache.hadoop.fs.s3a.adapter`.
+* All the core message classes are now automatically generated from a JSON protocol description.
+* All getter methods have been renamed.
+* All classes are constructed via builder methods.
+* Message classes are no longer Java `Serializable`.
+
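+For example, a request which V1 code built with a constructor and setters
+is now built through a builder, with fluent accessors for its fields
+(the bucket and key values below are illustrative):
+
+```java
+import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
+
+HeadObjectRequest request = HeadObjectRequest.builder()
+    .bucket("example-bucket")   // builder setter: no "set" prefix
+    .key("data/file.csv")
+    .build();
+String bucket = request.bucket();  // generated getter: no "get" prefix
+```
+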
+Most of these changes simply create what will feel like gratuitous migration effort;
+the removal of the `Serializable` nature from all message response classes can
+potentially break applications, such as anything passing them between Spark workers.
+See AWS SDK V2 issue [Simplify Modeled Message Marshalling #82](https://github.com/aws/aws-sdk-java-v2/issues/82);
+note that it was filed in 2017, then implement your own workaround pending that issue
+being resolved.
+
+### Compilation/Linkage Errors
+
+Any code making use of V1 SDK classes will fail if it:
+* Expects the V1 SDK classes to be on the classpath when `hadoop-aws` is declared as a dependency.
+* Uses V1-SDK-compatible methods previously exported by the `S3AFileSystem` class and associated classes.
+* Tries to pass S3A classes to V1 SDK classes (e.g. credential providers).
+
+The sole solution to these problems is "move to the V2 SDK".
+
+Some `S3AUtils` methods have been deleted:
+```
+cannot find symbol
+[ERROR]   symbol:   method createAwsConf(org.apache.hadoop.conf.Configuration,java.lang.String)
+[ERROR]   location: class org.apache.hadoop.fs.s3a.S3AUtils
+```
+
+The signature and superclass of `AWSCredentialProviderList` have changed, which can surface in
+different ways.
+
+Signature mismatch:
+```
+ cannot find symbol
+[ERROR]   symbol:   method getCredentials()
+[ERROR]   location: variable credentials of type org.apache.hadoop.fs.s3a.AWSCredentialProviderList
+```
+
+It is no longer a V1 credential provider, so it cannot be used to pass credentials to a V1 SDK class:
+```
+incompatible types: org.apache.hadoop.fs.s3a.AWSCredentialProviderList cannot be converted to com.amazonaws.auth.AWSCredentialsProvider
+```
+
+### `AmazonS3` replaced by `S3Client`; factory and accessor changed.
+
+The V1 S3 client class `com.amazonaws.services.s3.AmazonS3` has been superseded by
+`software.amazon.awssdk.services.s3.S3Client`.
+
+The `S3ClientFactory` interface has been replaced by one that creates a V2 `S3Client`.
+* Custom implementations will need to be updated.
+* The `InconsistentS3ClientFactory` class has been deleted.
+
+#### `S3AFileSystem` method changes: `S3AInternals`.
+
+The low-level S3 operations/client accessors have been moved into a new interface,
+`org.apache.hadoop.fs.s3a.S3AInternals`, which must be accessed via the
+`S3AFileSystem.getS3AInternals()` method.
+They have also been updated to return V2 SDK classes.
+
+```java
+@InterfaceStability.Unstable
+@InterfaceAudience.LimitedPrivate("testing/diagnostics")
+public interface S3AInternals {
+  S3Client getAmazonS3V2ClientForTesting(String reason);
+
+  @Retries.RetryTranslated
+  @AuditEntryPoint
+  String getBucketLocation() throws IOException;
+
+  @AuditEntryPoint
+  @Retries.RetryTranslated
+  String getBucketLocation(String bucketName) throws IOException;
+
+  @AuditEntryPoint
+  @Retries.RetryTranslated
+  HeadObjectResponse getObjectMetadata(Path path) throws IOException;
+
+  AWSCredentialProviderList shareCredentials(final String purpose);
+}
+```
+
+
+##### `S3AFileSystem.getAmazonS3ClientForTesting(String)` moved and return type changed
+
+The `S3AFileSystem.getAmazonS3ClientForTesting()` method has been deleted.
+
+Compilation error:
+```
+cannot find symbol
+[ERROR]   symbol:   method getAmazonS3ClientForTesting(java.lang.String)
+[ERROR]   location: variable fs of type org.apache.hadoop.fs.s3a.S3AFileSystem
+```
+
+It has been replaced by an `S3AInternals` equivalent which returns the V2 `S3Client`
+of the filesystem instance.
+
+Before:
+```java
+((S3AFileSystem) fs).getAmazonS3ClientForTesting("testing")
+```
+
+After:
+```java
+((S3AFileSystem) fs).getS3AInternals().getAmazonS3V2ClientForTesting("testing")
+```
+
+##### `S3AFileSystem.getObjectMetadata(Path path)` moved to `S3AInternals`; return type changed
+
+The `getObjectMetadata(Path)` call has been moved to the `S3AInternals` interface;
+an instance of the `software.amazon.awssdk.services.s3.model.HeadObjectResponse` class
+is now returned.
+The original `S3AFileSystem` method has been deleted.
+
+Before:
+```java
+((S3AFileSystem) fs).getObjectMetadata(path)
+```
+
+After:
+```java
+((S3AFileSystem) fs).getS3AInternals().getObjectMetadata(path)
+```
+
+##### `AWSCredentialProviderList shareCredentials(String)` moved to `S3AInternals`
+
+The operation to share a reference-counted access to the AWS credentials used
+by the S3A FS has been moved to `S3AInternals`.
+
+This is very much an implementation method, used to allow extension modules to share
+an authentication chain into other AWS SDK client services (DynamoDB, etc.).
+
+### Delegation Tokens
+
+1. Custom credential providers used in delegation token binding classes will need to be updated.
+2. The return type from delegation token binding has changed to support more class
+   instances being returned in the future.
+
+`AWSCredentialProviderList` has been upgraded to the V2 API.
+* It still retains a `refresh()` method but this is now a deprecated no-op.
+* It is still `Closeable`; its `close()` method iterates through all entries in
+the list; if they are `Closeable` or `AutoCloseable` then their `close()` method is invoked.
+* Accordingly, providers may still perform background refreshes in separate threads;
+  the S3A client will close its provider list when the filesystem itself is closed.
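+
+As a sketch, a provider which relies on that `close()` propagation to stop a
+background refresh thread could look like this (all names here are hypothetical):
+
+```java
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentials;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+
+public final class RefreshingProvider implements AwsCredentialsProvider, AutoCloseable {
+
+  private final AtomicReference<AwsCredentials> current = new AtomicReference<>();
+  private final ScheduledExecutorService refresher =
+      Executors.newSingleThreadScheduledExecutor();
+
+  public RefreshingProvider() {
+    refresh();
+    // background refresh in a separate thread
+    refresher.scheduleAtFixedRate(this::refresh, 10, 10, TimeUnit.MINUTES);
+  }
+
+  private void refresh() {
+    // fetch fresh secrets from wherever they live; static values here
+    current.set(AwsBasicCredentials.create("access", "secret"));
+  }
+
+  @Override
+  public AwsCredentials resolveCredentials() {
+    return current.get();
+  }
+
+  @Override
+  public void close() {
+    // invoked when the owning AWSCredentialProviderList is closed
+    refresher.shutdownNow();
+  }
+}
+```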
 
-The `getAmazonS3ClientForTesting()` method will also be updated to return the `S3Client`.
 
 ### Signers
 
@@ -74,3 +361,21 @@ has been replaced by [software.amazon.awssdk.core.signer.Signer](https://github.
 
 The change in signers will mean that custom signers will need to be updated to implement the new
 interface.
+
+There is no support to assist in this migration.
+
+### S3A Auditing Extensions.
+
+The callbacks from the SDK have all changed, as has
+the interface `org.apache.hadoop.fs.s3a.audit.AWSAuditEventCallbacks`.
+
+Examine the interface and associated implementations to
+see how to migrate.
+
+The option `fs.s3a.audit.request.handlers` to declare a list of v1 SDK
+`com.amazonaws.handlers.RequestHandler2` implementations to include
+in the AWS request chain is no longer supported: a warning is printed
+and the value ignored.
+
+The V2 SDK equivalent, classes implementing `software.amazon.awssdk.core.interceptor.ExecutionInterceptor`,
+can be declared in the configuration option `fs.s3a.audit.execution.interceptors`.

+ 1 - 1
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/delegation_tokens.md

@@ -339,7 +339,7 @@ Here is the effective list of providers if none are declared:
   <value>
     org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider,
     org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,
-    com.amazonaws.auth.EnvironmentVariableCredentialsProvider,
+    software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider,
     org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider
   </value>
 </property>

+ 46 - 96
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md

@@ -249,56 +249,39 @@ a warning has been printed since Hadoop 2.8 whenever such a URL was used.
 ```xml
 <property>
   <name>fs.s3a.access.key</name>
-  <description>AWS access key ID.
-   Omit for IAM role-based or provider-based authentication.</description>
+  <description>AWS access key ID used by S3A file system. Omit for IAM role-based or provider-based authentication.</description>
 </property>
 
 <property>
   <name>fs.s3a.secret.key</name>
-  <description>AWS secret key.
-   Omit for IAM role-based or provider-based authentication.</description>
+  <description>AWS secret key used by S3A file system. Omit for IAM role-based or provider-based authentication.</description>
 </property>
 
 <property>
-  <name>fs.s3a.aws.credentials.provider</name>
-  <description>
-    Comma-separated class names of credential provider classes which implement
-    com.amazonaws.auth.AWSCredentialsProvider.
-
-    These are loaded and queried in sequence for a valid set of credentials.
-    Each listed class must implement one of the following means of
-    construction, which are attempted in order:
-    1. a public constructor accepting java.net.URI and
-        org.apache.hadoop.conf.Configuration,
-    2. a public static method named getInstance that accepts no
-       arguments and returns an instance of
-       com.amazonaws.auth.AWSCredentialsProvider, or
-    3. a public default constructor.
-
-    Specifying org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider allows
-    anonymous access to a publicly accessible S3 bucket without any credentials.
-    Please note that allowing anonymous access to an S3 bucket compromises
-    security and therefore is unsuitable for most use cases. It can be useful
-    for accessing public data sets without requiring AWS credentials.
-
-    If unspecified, then the default list of credential provider classes,
-    queried in sequence, is:
-    1. org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider:
-       Uses the values of fs.s3a.access.key and fs.s3a.secret.key.
-    2. com.amazonaws.auth.EnvironmentVariableCredentialsProvider: supports
-        configuration of AWS access key ID and secret access key in
-        environment variables named AWS_ACCESS_KEY_ID and
-        AWS_SECRET_ACCESS_KEY, as documented in the AWS SDK.
-    3. com.amazonaws.auth.InstanceProfileCredentialsProvider: supports use
-        of instance profile credentials if running in an EC2 VM.
+  <name>fs.s3a.session.token</name>
+  <description>Session token, when using org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider
+    as one of the providers.
   </description>
 </property>
 
 <property>
-  <name>fs.s3a.session.token</name>
+  <name>fs.s3a.aws.credentials.provider</name>
+  <value>
+    org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider,
+    org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,
+    software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider,
+    org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider
+  </value>
   <description>
-    Session token, when using org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider
-    as one of the providers.
+    Comma-separated class names of credential provider classes which implement
+    software.amazon.awssdk.auth.credentials.AwsCredentialsProvider.
+
+    When S3A delegation tokens are not enabled, this list will be used
+    to directly authenticate with S3 and other AWS services.
+    When S3A Delegation tokens are enabled, depending upon the delegation
+    token binding it may be used
+    to communicate with the STS endpoint to request session/role
+    credentials.
   </description>
 </property>
 ```
@@ -350,13 +333,19 @@ credentials if they are defined.
 1. The [AWS environment variables](http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html#cli-environment),
 are then looked for: these will return session or full credentials depending
 on which values are set.
-1. An attempt is made to query the Amazon EC2 Instance Metadata Service to
+1. An attempt is made to query the Amazon EC2 Instance/k8s container Metadata Service to
  retrieve credentials published to EC2 VMs.
 
 S3A can be configured to obtain client authentication providers from classes
-which integrate with the AWS SDK by implementing the `com.amazonaws.auth.AWSCredentialsProvider`
-Interface. This is done by listing the implementation classes, in order of
+which integrate with the AWS SDK by implementing the
+`software.amazon.awssdk.auth.credentials.AwsCredentialsProvider`
+interface.
+This is done by listing the implementation classes, in order of
 preference, in the configuration option `fs.s3a.aws.credentials.provider`.
+In previous hadoop releases, providers were required to
+implement the AWS V1 SDK interface `com.amazonaws.auth.AWSCredentialsProvider`.
+Consult the [Upgrading S3A to AWS SDK V2](./aws_sdk_upgrade.html) documentation
+to see how to migrate credential providers.
 
 *Important*: AWS Credential Providers are distinct from _Hadoop Credential Providers_.
 As will be covered later, Hadoop Credential Providers allow passwords and other secrets
@@ -371,21 +360,23 @@ this is advised as a more secure way to store valuable secrets.
 
 There are a number of AWS Credential Providers inside the `hadoop-aws` JAR:
 
-| classname | description |
-|-----------|-------------|
-| `org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider`| Session Credentials |
-| `org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider`| Simple name/secret credentials |
-| `org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider`| Anonymous Login |
-| `org.apache.hadoop.fs.s3a.auth.AssumedRoleCredentialProvider<`| [Assumed Role credentials](assumed_roles.html) |
+| Hadoop module credential provider                              | Authentication Mechanism                         |
+|----------------------------------------------------------------|--------------------------------------------------|
+| `org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider`     | Session Credentials in configuration             |
+| `org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider`        | Simple name/secret credentials in configuration  |
+| `org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider`     | Anonymous Login                                  |
+| `org.apache.hadoop.fs.s3a.auth.AssumedRoleCredentialProvider`  | [Assumed Role credentials](./assumed_roles.html) |
+| `org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider` | EC2/k8s instance credentials                     |
 
 
-There are also many in the Amazon SDKs, in particular two which are automatically
-set up in the authentication chain:
+There are also many in the Amazon SDKs, with the common ones being:
 
 | classname | description |
 |-----------|-------------|
-| `com.amazonaws.auth.InstanceProfileCredentialsProvider`| EC2 Metadata Credentials |
-| `com.amazonaws.auth.EnvironmentVariableCredentialsProvider`| AWS Environment Variables |
+| `software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider` | AWS Environment Variables |
+| `software.amazon.awssdk.auth.credentials.InstanceProfileCredentialsProvider`| EC2 Metadata Credentials |
+| `software.amazon.awssdk.auth.credentials.ContainerCredentialsProvider`| EC2/k8s Metadata Credentials |
+
 
 
 ### <a name="auth_iam"></a> EC2 IAM Metadata Authentication with `InstanceProfileCredentialsProvider`
@@ -402,7 +393,7 @@ You can configure Hadoop to authenticate to AWS using a [named profile](https://
 
 To authenticate with a named profile:
 
-1. Declare `com.amazonaws.auth.profile.ProfileCredentialsProvider` as the provider.
+1. Declare `software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider` as the provider.
 1. Set your profile via the `AWS_PROFILE` environment variable.
 1. Due to a [bug in version 1 of the AWS Java SDK](https://github.com/aws/aws-sdk-java/issues/803),
 you'll need to remove the `profile` prefix from the AWS configuration section heading.
@@ -525,50 +516,9 @@ This means that the default S3A authentication chain can be defined as
   <value>
     org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider,
     org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,
-    com.amazonaws.auth.EnvironmentVariableCredentialsProvider,
+    software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider,
     org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider
   </value>
-  <description>
-    Comma-separated class names of credential provider classes which implement
-    com.amazonaws.auth.AWSCredentialsProvider.
-
-    When S3A delegation tokens are not enabled, this list will be used
-    to directly authenticate with S3 and other AWS services.
-    When S3A Delegation tokens are enabled, depending upon the delegation
-    token binding it may be used
-    to communicate with the STS endpoint to request session/role
-    credentials.
-
-    These are loaded and queried in sequence for a valid set of credentials.
-    Each listed class must implement one of the following means of
-    construction, which are attempted in order:
-    * a public constructor accepting java.net.URI and
-        org.apache.hadoop.conf.Configuration,
-    * a public constructor accepting org.apache.hadoop.conf.Configuration,
-    * a public static method named getInstance that accepts no
-       arguments and returns an instance of
-       com.amazonaws.auth.AWSCredentialsProvider, or
-    * a public default constructor.
-
-    Specifying org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider allows
-    anonymous access to a publicly accessible S3 bucket without any credentials.
-    Please note that allowing anonymous access to an S3 bucket compromises
-    security and therefore is unsuitable for most use cases. It can be useful
-    for accessing public data sets without requiring AWS credentials.
-
-    If unspecified, then the default list of credential provider classes,
-    queried in sequence, is:
-    * org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider: looks
-       for session login secrets in the Hadoop configuration.
-    * org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider:
-       Uses the values of fs.s3a.access.key and fs.s3a.secret.key.
-    * com.amazonaws.auth.EnvironmentVariableCredentialsProvider: supports
-        configuration of AWS access key ID and secret access key in
-        environment variables named AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
-        and AWS_SESSION_TOKEN as documented in the AWS SDK.
-    * org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider: picks up
-       IAM credentials of any EC2 VM or AWS container in which the process is running.
-  </description>
 </property>
 ```
 
@@ -1414,7 +1364,7 @@ role information available when deployed in Amazon EC2.
 ```xml
 <property>
   <name>fs.s3a.aws.credentials.provider</name>
-  <value>com.amazonaws.auth.InstanceProfileCredentialsProvider</value>
+  <value>org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider</value>
 </property>
 ```
 
@@ -2136,7 +2086,7 @@ If no custom signers are being used - this value does not need to be set.
 
 `SignerName:SignerClassName` - register a new signer with the specified name,
 and the class for this signer.
-The Signer Class must implement `com.amazonaws.auth.Signer`.
+The Signer Class must implement `software.amazon.awssdk.core.signer.Signer`.
 
 `SignerName:SignerClassName:SignerInitializerClassName` - similar to the above
 except also allows for a custom SignerInitializer

+ 38 - 5
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/s3_select.md

@@ -14,7 +14,7 @@
 
 # S3 Select
 
-**Experimental Feature**
+**Deprecated Feature**
 
 <!-- MACRO{toc|fromDepth=0|toDepth=5} -->
 
@@ -60,6 +60,20 @@ Record Readers.
 It's better here to directly use the Apache Spark, Hive, Impala, Flink or
 similar, which all use the latest ASF-supported libraries.
 
+## Dependencies: eventstream JAR
+
+To use S3 Select through the S3A connector, an extra JAR MUST be added to the classpath of your application:
+`eventstream-1.0.1.jar`.
+For command line tool use, this should be done by adding it to `share/hadoop/common/lib/`.
+
+```xml
+<dependency>
+  <groupId>software.amazon.eventstream</groupId>
+  <artifactId>eventstream</artifactId>
+  <version>1.0.1</version>
+</dependency>
+```
+
 ## Enabling/Disabling S3 Select
 
 S3 Select is enabled by default:
@@ -288,10 +302,12 @@ hadoop s3guard \
 ```
 
 
-## Use in MR/Analytics queries: Work in Progress
+## Use in MR/Analytics queries: Partially Supported
 
-S3 Select support in analytics queries is a work in progress. It does
-not work reliably with large source files where the work is split up.
+S3 Select in analytics queries is only partially supported.
+It does not work reliably with large source files where the work is split up,
+and as the various query engines all assume that .csv and .json formats are splittable,
+things go very wrong, fast.
 
 As a proof of concept *only*, S3 Select queries can be made through
 MapReduce jobs which use any Hadoop `RecordReader`
@@ -663,6 +679,24 @@ to the `get()` call: do it.
 
 ## Troubleshooting
 
+### `NoClassDefFoundError: software/amazon/eventstream/MessageDecoder`
+
+Select operation failing with a missing eventstream class.
+
+```
+java.io.IOException: java.lang.NoClassDefFoundError: software/amazon/eventstream/MessageDecoder
+at org.apache.hadoop.fs.s3a.select.SelectObjectContentHelper.select(SelectObjectContentHelper.java:75)
+at org.apache.hadoop.fs.s3a.WriteOperationHelper.lambda$select$10(WriteOperationHelper.java:660)
+at org.apache.hadoop.fs.store.audit.AuditingFunctions.lambda$withinAuditSpan$0(AuditingFunctions.java:62)
+at org.apache.hadoop.fs.s3a.Invoker.once(Invoker.java:122)
+```
+
+The eventstream JAR is not on the classpath, or is not in sync with the version of the full AWS SDK `bundle.jar`.
+
+Fix: get a compatible version of the JAR onto the classpath.
+
+### SQL errors
+
 Getting S3 Select code to work is hard, though those knowledgeable in SQL
 will find it easier.
 
@@ -673,7 +707,6 @@ Problems can be split into:
 1. Datatype casting issues
 1. Bad records/data in source files.
 1. Failure to configure MR jobs to work correctly.
-1. Failure of MR jobs due to
 
 The exceptions here are all based on the experience during writing tests;
 more may surface with broader use.

+ 4 - 51
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md

@@ -1002,9 +1002,6 @@ using an absolute XInclude reference to it.
 
 ## <a name="failure-injection"></a>Failure Injection
 
-**Warning do not enable any type of failure injection in production.  The
-following settings are for testing only.**
-
 S3A provides an "Inconsistent S3 Client Factory" that can be used to
 simulate throttling by injecting random failures on S3 client requests.
 
@@ -1016,55 +1013,8 @@ inconsistencies during testing of S3Guard. Now that S3 is consistent,
 injecting inconsistency is no longer needed during testing.
 
 
-### Enabling the InconsistentS3CClientFactory
-
 
-To enable the fault-injecting client via configuration, switch the
-S3A client to use the "Inconsistent S3 Client Factory" when connecting to
-S3:
-
-```xml
-<property>
-  <name>fs.s3a.s3.client.factory.impl</name>
-  <value>org.apache.hadoop.fs.s3a.InconsistentS3ClientFactory</value>
-</property>
-```
-
-The inconsistent client will, on every AWS SDK request,
-generate a random number, and if less than the probability,
-raise a 503 exception.
-
-```xml
-
-<property>
-  <name>fs.s3a.failinject.throttle.probability</name>
-  <value>0.05</value>
-</property>
-```
-
-These exceptions are returned to S3; they do not test the
-AWS SDK retry logic.
-
-
-### Using the `InconsistentS3CClientFactory` in downstream integration tests
-
-The inconsistent client is shipped in the `hadoop-aws` JAR, so it can
-be used in integration tests.
-
-## <a name="s3guard"></a> Testing S3Guard
-
-As part of the removal of S3Guard from the production code, the tests have been updated
-so that
-
-* All S3Guard-specific tests have been deleted.
-* All tests parameterized on S3Guard settings have had those test configurations removed.
-* The maven profiles option to run tests with S3Guard have been removed.
-
-There is no need to test S3Guard -and so tests are lot faster.
-(We developers are all happy)
-
-
-##<a name="assumed_roles"></a> Testing Assumed Roles
+## <a name="assumed_roles"></a> Testing Assumed Roles
 
 Tests for the AWS Assumed Role credential provider require an assumed
 role to request.
@@ -1285,10 +1235,13 @@ time bin/hadoop fs -copyToLocal -t 10  $BUCKET/\*aws\* tmp
 
 # ---------------------------------------------------
 # S3 Select on Landsat
+# this will fail with a ClassNotFoundException unless
+# the eventstream JAR is added to the classpath
 # ---------------------------------------------------
 
 export LANDSATGZ=s3a://landsat-pds/scene_list.gz
 
+
 bin/hadoop s3guard select -header use -compression gzip $LANDSATGZ \
  "SELECT s.entityId,s.cloudCover FROM S3OBJECT s WHERE s.cloudCover < '0.0' LIMIT 100"
 

+ 40 - 55
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md

@@ -70,14 +70,45 @@ These are Hadoop filesystem client classes, found in the `hadoop-aws` JAR.
 An exception reporting this class as missing means that this JAR is not on
 the classpath.
 
-### `ClassNotFoundException: com.amazonaws.services.s3.AmazonS3Client`
 
-(or other `com.amazonaws` class.)
+### `NoClassDefFoundError: software/amazon/awssdk/crt/s3/S3MetaRequest`
+
+The library `aws-crt.jar` is not on the classpath. Its classes
+are not in the AWS `bundle.jar` file, yet may be needed by some operations
+of the SDK.
+
+Fix: add the `aws-crt` JAR to the classpath.
+
+```
+java.lang.BootstrapMethodError: java.lang.NoClassDefFoundError: software/amazon/awssdk/crt/s3/S3MetaRequest
+at software.amazon.awssdk.services.s3.internal.crt.S3MetaRequestPauseObservable.<init>(S3MetaRequestPauseObservable.java:33)
+at software.amazon.awssdk.transfer.s3.internal.DefaultS3TransferManager.uploadFile(DefaultS3TransferManager.java:205)
+at org.apache.hadoop.fs.s3a.S3AFileSystem.putObject(S3AFileSystem.java:3064)
+at org.apache.hadoop.fs.s3a.S3AFileSystem.executePut(S3AFileSystem.java:4054)
+```
+
+### `ClassNotFoundException: software.amazon.awssdk.services.s3.S3Client`
+
+(or other `software.amazon` class.)
 
-This means that the `aws-java-sdk-bundle.jar` JAR is not on the classpath:
+This means that the AWS V2 SDK `bundle.jar` JAR is not on the classpath:
 add it.
 
-### `java.lang.NoSuchMethodError` referencing a `com.amazonaws` class
+### `ClassNotFoundException: com.amazonaws.auth.AWSCredentials`
+
+(or other `com.amazonaws` class.)
+
+With the move to the [V2 AWS SDK](../aws_sdk_upgrade.html),
+the v1 SDK classes are no longer on the classpath.
+
+If this happens when trying to use a custom credential provider defined
+in `fs.s3a.aws.credentials.provider`, then add the V1 SDK
+`aws-java-sdk-bundle` JAR to the classpath.
+
+If this happens in your own/third-party code, then again, add the JAR,
+and/or consider moving to the V2 SDK yourself.
+
+### `java.lang.NoSuchMethodError` referencing a `software.amazon` class
 
 This can be triggered by incompatibilities between the AWS SDK on the classpath
 and the version which Hadoop was compiled with.
@@ -86,12 +117,14 @@ The AWS SDK JARs change their signature enough between releases that the only
 way to safely update the AWS SDK version is to recompile Hadoop against the later
 version.
 
-The sole fix is to use the same version of the AWS SDK with which Hadoop
+The fix is to use the same version of the AWS SDK with which Hadoop
 was built.
 
 This can also be caused by having more than one version of an AWS SDK
-JAR on the classpath. If the full `aws-java-sdk-bundle<` JAR is on the
-classpath, do not add any of the `aws-sdk-` JARs.
+JAR on the classpath. If the full `bundle.jar` JAR is on the
+classpath, do not add any of the `aws-sdk-` JARs *except* for
+`aws-crt.jar` (which is required) and
+`eventstream.jar`, which is required when using S3 Select.
 
 
 ### `java.lang.NoSuchMethodError` referencing an `org.apache.hadoop` class
@@ -1990,51 +2023,3 @@ com.amazonaws.SdkClientException: Unable to execute HTTP request:
 
 When this happens, try to set `fs.s3a.connection.request.timeout` to a larger value or disable it
 completely by setting it to `0`.
-
-## <a name="upgrade_warnings"></a> SDK Upgrade Warnings
-
-S3A will soon be upgraded to [AWS's Java SDK V2](https://github.com/aws/aws-sdk-java-v2).
-For more information on the upgrade and what's changing, see
-[Upcoming upgrade to AWS Java SDK V2](./aws_sdk_upgrade.html).
-
-S3A logs the following warnings for things that will be changing in the upgrade. To disable these
-logs, comment out `log4j.logger.org.apache.hadoop.fs.s3a.SDKV2Upgrade` in log4j.properties.
-
-### <a name="ProviderReferenced"></a>  `Directly referencing AWS SDK V1 credential provider`
-
-This will be logged when an AWS credential provider is referenced directly in
-`fs.s3a.aws.credentials.provider`.
-For example, `com.amazonaws.auth.AWSSessionCredentialsProvider`
-
-To stop this warning, remove any AWS credential providers from `fs.s3a.aws.credentials.provider`.
-Instead, use S3A's credential providers.
-
-### <a name="ClientRequested"></a>  `getAmazonS3ClientForTesting() will be removed`
-
-This will be logged when `getAmazonS3ClientForTesting()` is called to get the S3 Client. With V2,
-the S3 client will change from type `com.amazonaws.services.s3.AmazonS3` to
-`software.amazon.awssdk.services.s3.S3Client`, and so this method will be removed.
-
-### <a name="DelegationTokenProvider"></a>
-### `Custom credential providers used in delegation tokens binding classes will need to be updated`
-
-This will be logged when delegation tokens are used.
-Delegation tokens allow the use of custom binding classes which can implement custom credential
-providers.
-These credential providers will currently be implementing
-`com.amazonaws.auth.AWSCredentialsProvider` and will need to be updated to implement
-`software.amazon.awssdk.auth.credentials.AwsCredentialsProvider`.
-
-### <a name="CustomSignerUsed"></a>
-### `The signer interface has changed in AWS SDK V2, custom signers will need to be updated`
-
-This will be logged when a custom signer is used.
-Custom signers will currently be implementing `com.amazonaws.auth.Signer` and will need to be
-updated to implement `software.amazon.awssdk.core.signer.Signer`.
-
-### <a name="GetObjectMetadataCalled"></a>
-### `getObjectMetadata() called. This operation and it's response will be changed`
-
-This will be logged when `getObjectMetadata` is called. In SDK V2, this operation has changed to
-`headObject()` and will return a response of the type `HeadObjectResponse`.
-

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java

@@ -64,7 +64,7 @@ public abstract class AbstractS3AMockTest {
     // unset S3CSE property from config to avoid pathIOE.
     conf.unset(Constants.S3_ENCRYPTION_ALGORITHM);
     fs.initialize(uri, conf);
-    s3 = fs.getAmazonS3ClientForTesting("mocking");
+    s3 = fs.getS3AInternals().getAmazonS3V2ClientForTesting("mocking");
   }
 
   public Configuration createConfiguration() {

+ 8 - 0
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java

@@ -210,6 +210,14 @@ public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase
     return (S3AFileSystem) super.getFileSystem();
   }
 
+  /**
+   * Get the {@link S3AInternals} internal access for the
+   * test filesystem.
+   * @return internals.
+   */
+  public S3AInternals getS3AInternals() {
+    return getFileSystem().getS3AInternals();
+  }
   /**
    * Describe a test in the logs.
    * @param text text to print

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java

@@ -69,7 +69,7 @@ public final class EncryptionTestUtils {
                                      final S3AEncryptionMethods algorithm,
                                      final String kmsKeyArn)
           throws IOException {
-    HeadObjectResponse md = fs.getObjectMetadata(path);
+    HeadObjectResponse md = fs.getS3AInternals().getObjectMetadata(path);
     String details = String.format(
             "file %s with encryption algorithm %s and key %s",
             path,

+ 97 - 44
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java

@@ -26,8 +26,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
 
+import org.assertj.core.api.Assertions;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
@@ -41,12 +42,14 @@ import org.slf4j.LoggerFactory;
 import static org.apache.hadoop.fs.s3a.Constants.*;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.getCSVTestPath;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
-import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.auth.delegation.DelegationConstants.DELEGATION_TOKEN_BINDING;
+import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.CONSTRUCTOR_EXCEPTION;
+import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 import static org.junit.Assert.*;
 
 /**
- * Integration tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic.
+ * Integration tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic
+ * through the S3A Filesystem instantiation process.
  */
 public class ITestS3AAWSCredentialsProvider {
   private static final Logger LOG =
@@ -55,17 +58,21 @@ public class ITestS3AAWSCredentialsProvider {
   @Rule
   public Timeout testTimeout = new Timeout(60_1000, TimeUnit.MILLISECONDS);
 
+  /**
+   * Expecting a wrapped ClassNotFoundException.
+   */
   @Test
-  public void testBadConfiguration() throws IOException {
-    Configuration conf = createConf();
-    conf.set(AWS_CREDENTIALS_PROVIDER, "no.such.class");
-    try {
-      createFailingFS(conf);
-    } catch (IOException e) {
-      if (!(e.getCause() instanceof ClassNotFoundException)) {
-        LOG.error("Unexpected nested cause: {} in {}", e.getCause(), e, e);
-        throw e;
-      }
+  public void testProviderClassNotFound() throws Exception {
+    Configuration conf = createConf("no.such.class");
+    final InstantiationIOException e =
+        intercept(InstantiationIOException.class, "java.lang.ClassNotFoundException", () ->
+            createFailingFS(conf));
+    if (InstantiationIOException.Kind.InstantiationFailure != e.getKind()) {
+      throw e;
+    }
+    if (!(e.getCause() instanceof ClassNotFoundException)) {
+      LOG.error("Unexpected nested cause: {} in {}", e.getCause(), e, e);
+      throw e;
     }
   }
 
@@ -73,10 +80,10 @@ public class ITestS3AAWSCredentialsProvider {
    * A bad CredentialsProvider which has no suitable constructor.
    *
    * This class does not provide a public constructor accepting Configuration,
-   * or a public factory method named getInstance that accepts no arguments,
+   * or a public factory method named create() that accepts no arguments,
    * or a public default constructor.
    */
-  static class BadCredentialsProviderConstructor
+  public static class BadCredentialsProviderConstructor
       implements AwsCredentialsProvider {
 
     @SuppressWarnings("unused")
@@ -92,37 +99,57 @@ public class ITestS3AAWSCredentialsProvider {
 
   @Test
   public void testBadCredentialsConstructor() throws Exception {
-    Configuration conf = createConf();
-    conf.set(AWS_CREDENTIALS_PROVIDER,
-        BadCredentialsProviderConstructor.class.getName());
-    try {
-      createFailingFS(conf);
-    } catch (IOException e) {
-      GenericTestUtils.assertExceptionContains(CONSTRUCTOR_EXCEPTION, e);
+    Configuration conf = createConf(BadCredentialsProviderConstructor.class);
+    final InstantiationIOException ex =
+        intercept(InstantiationIOException.class, CONSTRUCTOR_EXCEPTION, () ->
+            createFailingFS(conf));
+    if (InstantiationIOException.Kind.UnsupportedConstructor != ex.getKind()) {
+      throw ex;
     }
   }
 
-  protected Configuration createConf() {
+  /**
+   * Create a configuration bound to the given provider classname.
+   * @param provider provider to bind to
+   * @return a configuration
+   */
+  protected Configuration createConf(String provider) {
     Configuration conf = new Configuration();
     removeBaseAndBucketOverrides(conf,
         DELEGATION_TOKEN_BINDING,
         AWS_CREDENTIALS_PROVIDER);
+    conf.set(AWS_CREDENTIALS_PROVIDER, provider);
+    conf.set(DELEGATION_TOKEN_BINDING, "");
     return conf;
   }
 
+  /**
+   * Create a configuration bound to the given provider class.
+   * @param provider provider to bind to
+   * @return a configuration
+   */
+  protected Configuration createConf(Class provider) {
+    return createConf(provider.getName());
+  }
+
   /**
    * Create a filesystem, expect it to fail by raising an IOException.
    * Raises an assertion exception if in fact the FS does get instantiated.
+   * The FS is always closed.
    * @param conf configuration
    * @throws IOException an expected exception.
    */
   private void createFailingFS(Configuration conf) throws IOException {
-    S3AFileSystem fs = S3ATestUtils.createTestFileSystem(conf);
-    fs.listStatus(new Path("/"));
-    fail("Expected exception - got " + fs);
+    try (S3AFileSystem fs = S3ATestUtils.createTestFileSystem(conf)) {
+      fs.listStatus(new Path("/"));
+      fail("Expected exception - got " + fs);
+    }
   }
 
-  static class BadCredentialsProvider implements AwsCredentialsProvider {
+  /**
+   * Returns an invalid set of credentials.
+   */
+  public static class BadCredentialsProvider implements AwsCredentialsProvider {
 
     @SuppressWarnings("unused")
     public BadCredentialsProvider(Configuration conf) {
@@ -137,34 +164,60 @@ public class ITestS3AAWSCredentialsProvider {
 
   @Test
   public void testBadCredentials() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(AWS_CREDENTIALS_PROVIDER, BadCredentialsProvider.class.getName());
-    try {
-      createFailingFS(conf);
-    } catch (AccessDeniedException e) {
-      // expected
-    } catch (AWSServiceIOException e) {
-      GenericTestUtils.assertExceptionContains(
-          "UnrecognizedClientException", e);
-      // expected
-    }
+    Configuration conf = createConf(BadCredentialsProvider.class);
+    intercept(AccessDeniedException.class, "", () ->
+        createFailingFS(conf));
   }
 
+  /**
+   * Test using the anonymous credential provider with the public csv
+   * test file; if the test file path is unset then it will be skipped.
+   */
   @Test
   public void testAnonymousProvider() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(AWS_CREDENTIALS_PROVIDER,
-        AnonymousAWSCredentialsProvider.class.getName());
+    Configuration conf = createConf(AnonymousAWSCredentialsProvider.class);
     Path testFile = getCSVTestPath(conf);
     try (FileSystem fs = FileSystem.newInstance(testFile.toUri(), conf)) {
-      assertNotNull("S3AFileSystem instance must not be null", fs);
-      assertTrue("FileSystem must be the instance of S3AFileSystem", fs instanceof S3AFileSystem);
+      Assertions.assertThat(fs)
+          .describedAs("Filesystem")
+          .isNotNull();
       FileStatus stat = fs.getFileStatus(testFile);
-      assertNotNull("FileStatus with qualified path must not be null", stat);
       assertEquals(
           "The qualified path returned by getFileStatus should be same as the original file",
           testFile, stat.getPath());
     }
   }
 
+  /**
+   * Create credentials via the create() method.
+   * They are invalid credentials, so IO will fail as access denied.
+   */
+  @Test
+  public void testCredentialsWithCreateMethod() throws Exception {
+    Configuration conf = createConf(CredentialsProviderWithCreateMethod.class);
+    intercept(AccessDeniedException.class, "", () ->
+        createFailingFS(conf));
+  }
+
+  /**
+   * Credentials via the create() method.
+   */
+  public static final class CredentialsProviderWithCreateMethod implements AwsCredentialsProvider {
+
+    public static AwsCredentialsProvider create() {
+      LOG.info("creating CredentialsProviderWithCreateMethod");
+      return new CredentialsProviderWithCreateMethod();
+    }
+
+    /** Private: cannot be created directly. */
+    private CredentialsProviderWithCreateMethod() {
+    }
+
+    @Override
+    public AwsCredentials resolveCredentials() {
+      return AwsBasicCredentials.create("bad_key", "bad_secret");
+    }
+
+  }
+
 }

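A note on the create() factory pattern exercised above: wiring such a provider into a filesystem needs only the standard credential-provider option. A minimal usage sketch, assuming the "fs.s3a.aws.credentials.provider" key behind AWS_CREDENTIALS_PROVIDER and an illustrative bucket name; instantiation goes through the static create() method, and IO then fails as access denied:

    Configuration conf = new Configuration();
    conf.set("fs.s3a.aws.credentials.provider",
        CredentialsProviderWithCreateMethod.class.getName());
    // the provider is built via create(); its credentials are invalid,
    // so the first S3 call raises AccessDeniedException
    try (FileSystem fs = FileSystem.newInstance(new URI("s3a://example-bucket"), conf)) {
      fs.listStatus(new Path("/"));
    }
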
+ 8 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABucketExistence.java

@@ -36,8 +36,10 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;
 import static org.apache.hadoop.fs.s3a.Constants.AWS_REGION;
 import static org.apache.hadoop.fs.s3a.Constants.AWS_S3_ACCESSPOINT_REQUIRED;
+import static org.apache.hadoop.fs.s3a.Constants.ENDPOINT;
 import static org.apache.hadoop.fs.s3a.Constants.FS_S3A;
 import static org.apache.hadoop.fs.s3a.Constants.S3A_BUCKET_PROBE;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 
 /**
@@ -124,8 +126,12 @@ public class ITestS3ABucketExistence extends AbstractS3ATestBase {
   private Configuration createConfigurationWithProbe(final int probe) {
     Configuration conf = new Configuration(getFileSystem().getConf());
     S3ATestUtils.disableFilesystemCaching(conf);
+    removeBaseAndBucketOverrides(conf,
+        S3A_BUCKET_PROBE,
+        ENDPOINT,
+        AWS_REGION);
     conf.setInt(S3A_BUCKET_PROBE, probe);
-    conf.set(AWS_REGION, "eu-west-1");
+    conf.set(AWS_REGION, EU_WEST_1);
     return conf;
   }
 
@@ -204,7 +210,7 @@ public class ITestS3ABucketExistence extends AbstractS3ATestBase {
    */
   private Configuration createArnConfiguration() {
     Configuration configuration = createConfigurationWithProbe(2);
-    configuration.set(AWS_REGION, "eu-west-1");
+    configuration.set(AWS_REGION, EU_WEST_1);
     return configuration;
   }
 

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACannedACLs.java

@@ -90,7 +90,7 @@ public class ITestS3ACannedACLs extends AbstractS3ATestBase {
     S3AFileSystem fs = getFileSystem();
 
     StoreContext storeContext = fs.createStoreContext();
-    S3Client s3 = fs.getAmazonS3ClientForTesting("acls");
+    S3Client s3 = getS3AInternals().getAmazonS3V2ClientForTesting("acls");
     String key = storeContext.pathToKey(path);
     if (!isFile) {
       key = key + "/";

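The change above shows the recurring pattern of this commit: client access moves from S3AFileSystem methods to the new S3AInternals interface. A minimal sketch of the access path, assuming an already-initialized S3AFileSystem fs and a Path path; the reason string is free-form and intended for logging:

    S3AInternals internals = fs.getS3AInternals();
    // low-level client access, now behind the internals interface
    S3Client s3 = internals.getAmazonS3V2ClientForTesting("diagnostics");
    // getObjectMetadata(Path) also moved here from S3AFileSystem
    HeadObjectResponse md = internals.getObjectMetadata(path);
    String storageClass = md.storageClassAsString();
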
+ 54 - 46
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java

@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -18,6 +18,18 @@
 
 package org.apache.hadoop.fs.s3a;
 
+import java.io.File;
+import java.net.URI;
+import java.nio.file.AccessDeniedException;
+import java.security.PrivilegedExceptionAction;
+
+import org.assertj.core.api.Assertions;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.rules.Timeout;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import software.amazon.awssdk.core.client.config.SdkClientConfiguration;
 import software.amazon.awssdk.core.client.config.SdkClientOption;
 import software.amazon.awssdk.core.interceptor.ExecutionAttributes;
@@ -26,7 +38,6 @@ import software.amazon.awssdk.http.SdkHttpFullRequest;
 import software.amazon.awssdk.services.s3.S3Client;
 import software.amazon.awssdk.services.s3.S3Configuration;
 import software.amazon.awssdk.services.s3.model.HeadBucketRequest;
-import software.amazon.awssdk.services.s3.model.S3Exception;
 import software.amazon.awssdk.services.sts.StsClient;
 import software.amazon.awssdk.services.sts.model.StsException;
 
@@ -38,29 +49,17 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
 import org.apache.hadoop.fs.s3a.auth.STSClientFactory;
 import org.apache.hadoop.fs.s3native.S3xLoginHelper;
-import org.apache.hadoop.test.GenericTestUtils;
-
-import org.assertj.core.api.Assertions;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.File;
-import java.net.URI;
-import java.security.PrivilegedExceptionAction;
-
 import org.apache.hadoop.security.ProviderUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.alias.CredentialProvider;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.http.HttpStatus;
-import org.junit.rules.TemporaryFolder;
 
+import static java.util.Objects.requireNonNull;
 import static org.apache.hadoop.fs.s3a.Constants.*;
+import static org.apache.hadoop.fs.s3a.S3ATestConstants.EU_WEST_1;
 import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
@@ -91,6 +90,23 @@ public class ITestS3AConfiguration {
   @Rule
   public final TemporaryFolder tempDir = new TemporaryFolder();
 
+  /**
+   * Get the S3 client of the active filesystem.
+   * @param reason why?
+   * @return the client
+   */
+  private S3Client getS3Client(String reason) {
+    return requireNonNull(getS3AInternals().getAmazonS3V2ClientForTesting(reason));
+  }
+
+  /**
+   * Get the internals of the active filesystem.
+   * @return the internals
+   */
+  private S3AInternals getS3AInternals() {
+    return fs.getS3AInternals();
+  }
+
   /**
    * Test if custom endpoint is picked up.
    * <p>
@@ -118,7 +134,6 @@ public class ITestS3AConfiguration {
     } else {
       conf.set(Constants.ENDPOINT, endpoint);
       fs = S3ATestUtils.createTestFileSystem(conf);
-      S3Client s3 = fs.getAmazonS3ClientForTesting("test endpoint");
       String endPointRegion = "";
       // Differentiate handling of "s3-" and "s3." based endpoint identifiers
       String[] endpointParts = StringUtils.split(endpoint, '.');
@@ -129,9 +144,7 @@ public class ITestS3AConfiguration {
       } else {
         fail("Unexpected endpoint");
       }
-      // TODO: review way to get the bucket region.
-      String region = s3.getBucketLocation(b -> b.bucket(fs.getUri().getHost()))
-          .locationConstraintAsString();
+      String region = getS3AInternals().getBucketLocation();
       assertEquals("Endpoint config setting and bucket location differ: ",
           endPointRegion, region);
     }
@@ -159,7 +172,7 @@ public class ITestS3AConfiguration {
   }
 
   /**
-   * Expect a filesystem to not be created from a configuration
+   * Expect a filesystem to not be created from a configuration.
    * @return the exception intercepted
    * @throws Exception any other exception
    */
@@ -358,8 +371,7 @@ public class ITestS3AConfiguration {
     try {
       fs = S3ATestUtils.createTestFileSystem(conf);
       assertNotNull(fs);
-      S3Client s3 = fs.getAmazonS3ClientForTesting("configuration");
-      assertNotNull(s3);
+      S3Client s3 = getS3Client("configuration");
 
       SdkClientConfiguration clientConfiguration = getField(s3, SdkClientConfiguration.class,
           "clientConfiguration");
@@ -393,8 +405,7 @@ public class ITestS3AConfiguration {
     conf = new Configuration();
     fs = S3ATestUtils.createTestFileSystem(conf);
     assertNotNull(fs);
-    S3Client s3 = fs.getAmazonS3ClientForTesting("User Agent");
-    assertNotNull(s3);
+    S3Client s3 = getS3Client("User Agent");
     SdkClientConfiguration clientConfiguration = getField(s3, SdkClientConfiguration.class,
         "clientConfiguration");
     Assertions.assertThat(clientConfiguration.option(SdkClientOption.CLIENT_USER_AGENT))
@@ -408,8 +419,7 @@ public class ITestS3AConfiguration {
     conf.set(Constants.USER_AGENT_PREFIX, "MyApp");
     fs = S3ATestUtils.createTestFileSystem(conf);
     assertNotNull(fs);
-    S3Client s3 = fs.getAmazonS3ClientForTesting("User agent");
-    assertNotNull(s3);
+    S3Client s3 = getS3Client("User agent");
     SdkClientConfiguration clientConfiguration = getField(s3, SdkClientConfiguration.class,
         "clientConfiguration");
     Assertions.assertThat(clientConfiguration.option(SdkClientOption.CLIENT_USER_AGENT))
@@ -422,7 +432,7 @@ public class ITestS3AConfiguration {
     conf = new Configuration();
     conf.set(REQUEST_TIMEOUT, "120");
     fs = S3ATestUtils.createTestFileSystem(conf);
-    S3Client s3 = fs.getAmazonS3ClientForTesting("Request timeout (ms)");
+    S3Client s3 = getS3Client("Request timeout (ms)");
     SdkClientConfiguration clientConfiguration = getField(s3, SdkClientConfiguration.class,
         "clientConfiguration");
     assertEquals("Configured " + REQUEST_TIMEOUT +
@@ -436,7 +446,7 @@ public class ITestS3AConfiguration {
     conf = new Configuration();
     fs = S3ATestUtils.createTestFileSystem(conf);
     AWSCredentialProviderList credentials =
-        fs.shareCredentials("testCloseIdempotent");
+        getS3AInternals().shareCredentials("testCloseIdempotent");
     credentials.close();
     fs.close();
     assertTrue("Closing FS didn't close credentials " + credentials,
@@ -529,36 +539,34 @@ public class ITestS3AConfiguration {
   }
 
   @Test(timeout = 10_000L)
-  public void testS3SpecificSignerOverride() throws IOException {
+  public void testS3SpecificSignerOverride() throws Exception {
     Configuration config = new Configuration();
+    removeBaseAndBucketOverrides(config,
+        CUSTOM_SIGNERS, SIGNING_ALGORITHM_S3, SIGNING_ALGORITHM_STS, AWS_REGION);
 
     config.set(CUSTOM_SIGNERS,
-        "CustomS3Signer:" + CustomS3Signer.class.getName() + ",CustomSTSSigner:"
-            + CustomSTSSigner.class.getName());
+        "CustomS3Signer:" + CustomS3Signer.class.getName()
+            + ",CustomSTSSigner:" + CustomSTSSigner.class.getName());
 
     config.set(SIGNING_ALGORITHM_S3, "CustomS3Signer");
     config.set(SIGNING_ALGORITHM_STS, "CustomSTSSigner");
 
-    config.set(AWS_REGION, "eu-west-1");
+    config.set(AWS_REGION, EU_WEST_1);
     fs = S3ATestUtils.createTestFileSystem(config);
 
-    S3Client s3Client = fs.getAmazonS3ClientForTesting("testS3SpecificSignerOverride");
+    S3Client s3Client = getS3Client("testS3SpecificSignerOverride");
 
+    final String bucket = fs.getBucket();
     StsClient stsClient =
-        STSClientFactory.builder(config, fs.getBucket(), new AnonymousAWSCredentialsProvider(), "",
+        STSClientFactory.builder(config, bucket, new AnonymousAWSCredentialsProvider(), "",
             "").build();
 
-    try {
-      stsClient.getSessionToken();
-    } catch (StsException exception) {
-      // Expected 403, as credentials are not provided.
-    }
+    intercept(StsException.class, "", () ->
+        stsClient.getSessionToken());
 
-    try {
-      s3Client.headBucket(HeadBucketRequest.builder().bucket(fs.getBucket()).build());
-    } catch (S3Exception exception) {
-      // Expected 403, as credentials are not provided.
-    }
+    intercept(AccessDeniedException.class, "", () ->
+        Invoker.once("head", bucket, () ->
+            s3Client.headBucket(HeadBucketRequest.builder().bucket(bucket).build())));
 
     Assertions.assertThat(CustomS3Signer.isS3SignerCalled())
         .describedAs("Custom S3 signer not called").isTrue();

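The signer test above also documents the registration format: fs.s3a.custom.signers takes comma-separated Name:ClassName pairs, and the per-service signing options then select a registered signer by name. A configuration sketch, assuming those property names from Constants; the signer class here is illustrative:

    Configuration conf = new Configuration();
    // register a signer implementation under the name "MySigner"
    conf.set("fs.s3a.custom.signers", "MySigner:com.example.MySigner");
    // have the S3 client sign its requests with it
    conf.set("fs.s3a.s3.signing-algorithm", "MySigner");
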
+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java

@@ -51,7 +51,7 @@ public class ITestS3AEncryptionSSEKMSDefaultKey
 
   @Override
   protected void assertEncrypted(Path path) throws IOException {
-    HeadObjectResponse md = getFileSystem().getObjectMetadata(path);
+    HeadObjectResponse md = getS3AInternals().getObjectMetadata(path);
     assertEquals("SSE Algorithm", EncryptionTestUtils.AWS_KMS_SSE_ALGORITHM,
             md.serverSideEncryptionAsString());
     assertThat(md.ssekmsKeyId(), containsString("arn:aws:kms:"));

+ 2 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java

@@ -118,7 +118,8 @@ public class ITestS3AEncryptionWithDefaultS3Settings extends
     S3AFileSystem fs = getFileSystem();
     Path path = path(getMethodName() + "find-encryption-algo");
     ContractTestUtils.touch(fs, path);
-    String sseAlgorithm = fs.getObjectMetadata(path).serverSideEncryptionAsString();
+    String sseAlgorithm = getS3AInternals().getObjectMetadata(path)
+        .serverSideEncryptionAsString();
     if(StringUtils.isBlank(sseAlgorithm) ||
             !sseAlgorithm.equals(AWS_KMS_SSE_ALGORITHM)) {
       skip("Test bucket is not configured with " + AWS_KMS_SSE_ALGORITHM);

+ 1 - 10
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java

@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.assertj.core.api.Assertions;
-import org.junit.Assert;
 import org.junit.Test;
 import software.amazon.awssdk.awscore.AwsExecutionAttribute;
 import software.amazon.awssdk.awscore.exception.AwsServiceException;
@@ -34,14 +33,12 @@ import software.amazon.awssdk.core.interceptor.ExecutionAttributes;
 import software.amazon.awssdk.core.interceptor.ExecutionInterceptor;
 import software.amazon.awssdk.services.s3.S3Client;
 import software.amazon.awssdk.services.s3.model.HeadBucketRequest;
-import software.amazon.awssdk.services.s3.model.S3Exception;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3a.statistics.impl.EmptyS3AStatisticsContext;
 
 import static org.apache.hadoop.fs.s3a.Constants.AWS_REGION;
 import static org.apache.hadoop.fs.s3a.Statistic.STORE_REGION_PROBE;
-import static org.apache.hadoop.fs.s3a.impl.InternalConstants.SC_301_MOVED_PERMANENTLY;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 
 /**
@@ -68,13 +65,7 @@ public class ITestS3AEndpointRegion extends AbstractS3ATestBase {
     S3AFileSystem fs = new S3AFileSystem();
     fs.initialize(getFileSystem().getUri(), conf);
 
-    try  {
-      fs.getBucketMetadata();
-    } catch (S3Exception exception) {
-      if (exception.statusCode() == SC_301_MOVED_PERMANENTLY) {
-        Assert.fail(exception.toString());
-      }
-    }
+    fs.getS3AInternals().getBucketMetadata();
 
     Assertions.assertThat(fs.getInstrumentation().getCounterValue(STORE_REGION_PROBE))
         .describedAs("Region is not configured, region probe should have been made").isEqualTo(1);

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java

@@ -411,8 +411,8 @@ public class ITestS3AMiscOperations extends AbstractS3ATestBase {
    */
   private GetBucketEncryptionResponse getDefaultEncryption() throws IOException {
     S3AFileSystem fs = getFileSystem();
-    S3Client s3 = fs.getAmazonS3ClientForTesting("check default encryption");
-    try {
+    S3Client s3 = getS3AInternals().getAmazonS3V2ClientForTesting("check default encryption");
+    try (AuditSpan s = span()) {
       return Invoker.once("getBucketEncryption()",
           fs.getBucket(),
           () -> s3.getBucketEncryption(GetBucketEncryptionRequest.builder()

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java

@@ -115,7 +115,7 @@ public class ITestS3ATemporaryCredentials extends AbstractS3ATestBase {
   public void testSTS() throws IOException {
     Configuration conf = getContract().getConf();
     S3AFileSystem testFS = getFileSystem();
-    credentials = testFS.shareCredentials("testSTS");
+    credentials = getS3AInternals().shareCredentials("testSTS");
 
     String bucket = testFS.getBucket();
     StsClientBuilder builder = STSClientFactory.builder(
@@ -363,7 +363,7 @@ public class ITestS3ATemporaryCredentials extends AbstractS3ATestBase {
       final String region,
       final String exceptionText) throws Exception {
     try(AWSCredentialProviderList parentCreds =
-            getFileSystem().shareCredentials("test");
+            getS3AInternals().shareCredentials("test");
         DurationInfo ignored = new DurationInfo(LOG, "requesting credentials")) {
       Configuration conf = new Configuration(getContract().getConf());
 

+ 5 - 0
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java

@@ -251,4 +251,9 @@ public interface S3ATestConstants {
    * Value: {@value}.
    */
   String PROJECT_BUILD_DIRECTORY_PROPERTY = "project.build.directory";
+
+  /**
+   * AWS Ireland region.
+   */
+  String EU_WEST_1 = "eu-west-1";
 }

+ 137 - 116
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java

@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -30,42 +30,46 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 import javax.annotation.Nullable;
 
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.AWSCredentialsProvider;
-import com.amazonaws.auth.ContainerCredentialsProvider;
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
 import software.amazon.awssdk.auth.credentials.AwsCredentials;
 import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
 import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider;
 import software.amazon.awssdk.auth.credentials.InstanceProfileCredentialsProvider;
 
-import org.apache.hadoop.fs.s3a.adapter.V1V2AwsCredentialProviderAdapter;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3a.auth.AbstractSessionCredentialsProvider;
 import org.apache.hadoop.fs.s3a.auth.AssumedRoleCredentialProvider;
+import org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider;
 import org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException;
+import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
 import org.apache.hadoop.io.retry.RetryPolicy;
 
-import static org.apache.hadoop.fs.s3a.Constants.*;
-import static org.apache.hadoop.fs.s3a.S3ATestConstants.*;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
-import static org.apache.hadoop.fs.s3a.S3AUtils.*;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.ABSTRACT_PROVIDER;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.NOT_AWS_V2_PROVIDER;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.STANDARD_AWS_PROVIDERS;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.buildAWSProviderList;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.createAWSCredentialProviderSet;
+import static org.apache.hadoop.fs.s3a.Constants.ASSUMED_ROLE_CREDENTIALS_PROVIDER;
+import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
+import static org.apache.hadoop.fs.s3a.S3ATestConstants.DEFAULT_CSVTEST_FILE;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.authenticationContains;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.buildClassListString;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getCSVTestPath;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.STANDARD_AWS_PROVIDERS;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.buildAWSProviderList;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.createAWSCredentialProviderList;
+import static org.apache.hadoop.fs.s3a.impl.InstantiationIOException.DOES_NOT_IMPLEMENT;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 import static org.apache.hadoop.test.LambdaTestUtils.interceptFuture;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Unit tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic.
@@ -78,19 +82,18 @@ public class TestS3AAWSCredentialsProvider {
   private static final URI TESTFILE_URI = new Path(
       DEFAULT_CSVTEST_FILE).toUri();
 
-  @Rule
-  public ExpectedException exception = ExpectedException.none();
+  private static final Logger LOG = LoggerFactory.getLogger(TestS3AAWSCredentialsProvider.class);
 
   @Test
   public void testProviderWrongClass() throws Exception {
     expectProviderInstantiationFailure(this.getClass(),
-        NOT_AWS_V2_PROVIDER);
+        DOES_NOT_IMPLEMENT + " software.amazon.awssdk.auth.credentials.AwsCredentialsProvider");
   }
 
   @Test
   public void testProviderAbstractClass() throws Exception {
     expectProviderInstantiationFailure(AbstractProvider.class,
-        ABSTRACT_PROVIDER);
+        InstantiationIOException.ABSTRACT_PROVIDER);
   }
 
   @Test
@@ -103,14 +106,14 @@ public class TestS3AAWSCredentialsProvider {
   public void testProviderConstructorError() throws Exception {
     expectProviderInstantiationFailure(
         ConstructorSignatureErrorProvider.class,
-        CONSTRUCTOR_EXCEPTION);
+        InstantiationIOException.CONSTRUCTOR_EXCEPTION);
   }
 
   @Test
   public void testProviderFailureError() throws Exception {
     expectProviderInstantiationFailure(
         ConstructorFailureProvider.class,
-        INSTANTIATION_EXCEPTION);
+        InstantiationIOException.INSTANTIATION_EXCEPTION);
   }
 
   @Test
@@ -122,7 +125,7 @@ public class TestS3AAWSCredentialsProvider {
             + " ,\n " + AnonymousAWSCredentialsProvider.NAME);
     Path testFile = getCSVTestPath(conf);
 
-    AWSCredentialProviderList list = createAWSCredentialProviderSet(
+    AWSCredentialProviderList list = createAWSCredentialProviderList(
         testFile.toUri(), conf);
     List<Class<?>> expectedClasses =
         Arrays.asList(
@@ -138,9 +141,9 @@ public class TestS3AAWSCredentialsProvider {
     Configuration conf = new Configuration(false);
     // use the default credential provider chain
     conf.unset(AWS_CREDENTIALS_PROVIDER);
-    AWSCredentialProviderList list1 = createAWSCredentialProviderSet(
+    AWSCredentialProviderList list1 = createAWSCredentialProviderList(
         uri1, conf);
-    AWSCredentialProviderList list2 = createAWSCredentialProviderSet(
+    AWSCredentialProviderList list2 = createAWSCredentialProviderList(
         uri2, conf);
     List<Class<?>> expectedClasses = STANDARD_AWS_PROVIDERS;
     assertCredentialProviders(expectedClasses, list1);
@@ -153,28 +156,7 @@ public class TestS3AAWSCredentialsProvider {
     // use the default credential provider chain
     conf.unset(AWS_CREDENTIALS_PROVIDER);
     assertCredentialProviders(STANDARD_AWS_PROVIDERS,
-        createAWSCredentialProviderSet(null, conf));
-  }
-
-  @Test
-  public void testConfiguredChainV1V2() throws Exception {
-    URI uri1 = new URI("s3a://bucket1"), uri2 = new URI("s3a://bucket2");
-    List<Class<?>> credentialProviders =
-        Arrays.asList(
-            ContainerCredentialsProvider.class,
-            AnonymousAWSCredentialsProvider.class);
-    List<Class<?>> expectedClasses =
-        Arrays.asList(
-            V1V2AwsCredentialProviderAdapter.class,
-            AnonymousAWSCredentialsProvider.class);
-    Configuration conf =
-        createProviderConfiguration(buildClassListString(credentialProviders));
-    AWSCredentialProviderList list1 = createAWSCredentialProviderSet(
-        uri1, conf);
-    AWSCredentialProviderList list2 = createAWSCredentialProviderSet(
-        uri2, conf);
-    assertCredentialProviders(expectedClasses, list1);
-    assertCredentialProviders(expectedClasses, list2);
+        createAWSCredentialProviderList(null, conf));
   }
 
   @Test
@@ -182,14 +164,15 @@ public class TestS3AAWSCredentialsProvider {
     URI uri1 = new URI("s3a://bucket1"), uri2 = new URI("s3a://bucket2");
     List<Class<?>> expectedClasses =
         Arrays.asList(
-            EnvironmentVariableCredentialsProvider.class,
-            InstanceProfileCredentialsProvider.class,
-            AnonymousAWSCredentialsProvider.class);
+            IAMInstanceCredentialsProvider.class,
+            AnonymousAWSCredentialsProvider.class,
+            EnvironmentVariableCredentialsProvider.class
+        );
     Configuration conf =
         createProviderConfiguration(buildClassListString(expectedClasses));
-    AWSCredentialProviderList list1 = createAWSCredentialProviderSet(
+    AWSCredentialProviderList list1 = createAWSCredentialProviderList(
         uri1, conf);
-    AWSCredentialProviderList list2 = createAWSCredentialProviderSet(
+    AWSCredentialProviderList list2 = createAWSCredentialProviderList(
         uri2, conf);
     assertCredentialProviders(expectedClasses, list1);
     assertCredentialProviders(expectedClasses, list2);
@@ -203,9 +186,9 @@ public class TestS3AAWSCredentialsProvider {
         Arrays.asList(
             InstanceProfileCredentialsProvider.class);
     conf.set(AWS_CREDENTIALS_PROVIDER, buildClassListString(expectedClasses));
-    AWSCredentialProviderList list1 = createAWSCredentialProviderSet(
+    AWSCredentialProviderList list1 = createAWSCredentialProviderList(
         uri1, conf);
-    AWSCredentialProviderList list2 = createAWSCredentialProviderSet(
+    AWSCredentialProviderList list2 = createAWSCredentialProviderList(
         uri2, conf);
     assertCredentialProviders(expectedClasses, list1);
     assertCredentialProviders(expectedClasses, list2);
@@ -222,51 +205,75 @@ public class TestS3AAWSCredentialsProvider {
             EnvironmentVariableCredentialsProvider.class),
         Sets.newHashSet());
     assertTrue("empty credentials", credentials.size() > 0);
+  }
+
+  @Test
+  public void testProviderConstructor() throws Throwable {
+    final AWSCredentialProviderList list = new AWSCredentialProviderList("name",
+        new AnonymousAWSCredentialsProvider(),
+        new ErrorProvider(TESTFILE_URI, new Configuration()));
+    Assertions.assertThat(list.getProviders())
+        .describedAs("provider list in %s", list)
+        .hasSize(2);
+    final AwsCredentials credentials = list.resolveCredentials();
+    Assertions.assertThat(credentials)
+        .isInstanceOf(AwsBasicCredentials.class);
+    assertCredentialResolution(credentials, null, null);
+  }
+
+  public static void assertCredentialResolution(AwsCredentials creds, String key, String secret) {
+    Assertions.assertThat(creds.accessKeyId())
+        .describedAs("access key of %s", creds)
+        .isEqualTo(key);
+    Assertions.assertThat(creds.secretAccessKey())
+        .describedAs("secret key of %s", creds)
+        .isEqualTo(secret);
+  }
+
+  private String buildClassList(Class... classes) {
+    return Arrays.stream(classes)
+        .map(Class::getCanonicalName)
+        .collect(Collectors.joining(","));
+  }
 
+  private String buildClassList(String... classes) {
+    return Arrays.stream(classes)
+        .collect(Collectors.joining(","));
   }
 
   /**
    * A credential provider declared as abstract, so it cannot be instantiated.
    */
-  static abstract class AbstractProvider implements AWSCredentialsProvider {
+  static abstract class AbstractProvider implements AwsCredentialsProvider {
+
+    @Override
+    public AwsCredentials resolveCredentials() {
+      return null;
+    }
   }
 
   /**
    * A credential provider whose constructor signature doesn't match.
    */
   protected static class ConstructorSignatureErrorProvider
-      implements AWSCredentialsProvider {
+      extends AbstractProvider {
 
     @SuppressWarnings("unused")
     public ConstructorSignatureErrorProvider(String str) {
     }
-
-    @Override
-    public AWSCredentials getCredentials() {
-      return null;
-    }
-
-    @Override
-    public void refresh() {
-    }
   }
 
   /**
    * A credential provider whose constructor raises an NPE.
    */
   protected static class ConstructorFailureProvider
-      implements AwsCredentialsProvider {
+      extends AbstractProvider {
 
     @SuppressWarnings("unused")
     public ConstructorFailureProvider() {
       throw new NullPointerException("oops");
     }
 
-    @Override
-    public AwsCredentials resolveCredentials() {
-      return null;
-    }
-
   }
 
   @Test
@@ -279,33 +286,23 @@ public class TestS3AAWSCredentialsProvider {
     }
   }
 
-  protected static class AWSExceptionRaisingFactory implements AWSCredentialsProvider {
+  protected static class AWSExceptionRaisingFactory extends AbstractProvider {
 
     public static final String NO_AUTH = "No auth";
 
-    public static AWSCredentialsProvider getInstance() {
+    public static AwsCredentialsProvider create() {
       throw new NoAuthWithAWSException(NO_AUTH);
     }
-
-    @Override
-    public AWSCredentials getCredentials() {
-      return null;
-    }
-
-    @Override
-    public void refresh() {
-
-    }
   }
 
   @Test
   public void testFactoryWrongType() throws Throwable {
     expectProviderInstantiationFailure(
         FactoryOfWrongType.class,
-        CONSTRUCTOR_EXCEPTION);
+        InstantiationIOException.CONSTRUCTOR_EXCEPTION);
   }
 
-  static class FactoryOfWrongType implements AWSCredentialsProvider {
+  static class FactoryOfWrongType extends AbstractProvider {
 
     public static final String NO_AUTH = "No auth";
 
@@ -314,14 +311,10 @@ public class TestS3AAWSCredentialsProvider {
     }
 
     @Override
-    public AWSCredentials getCredentials() {
+    public AwsCredentials resolveCredentials() {
       return null;
     }
 
-    @Override
-    public void refresh() {
-
-    }
   }
 
   /**
@@ -334,7 +327,7 @@ public class TestS3AAWSCredentialsProvider {
   private IOException expectProviderInstantiationFailure(String option,
       String expectedErrorText) throws Exception {
     return intercept(IOException.class, expectedErrorText,
-        () -> createAWSCredentialProviderSet(
+        () -> createAWSCredentialProviderList(
             TESTFILE_URI,
             createProviderConfiguration(option)));
   }
@@ -385,7 +378,9 @@ public class TestS3AAWSCredentialsProvider {
       AWSCredentialProviderList list) {
     assertNotNull(list);
     List<AwsCredentialsProvider> providers = list.getProviders();
-    assertEquals(expectedClasses.size(), providers.size());
+    Assertions.assertThat(providers)
+        .describedAs("providers")
+        .hasSize(expectedClasses.size());
     for (int i = 0; i < expectedClasses.size(); ++i) {
       Class<?> expectedClass =
           expectedClasses.get(i);
@@ -493,22 +488,13 @@ public class TestS3AAWSCredentialsProvider {
   /**
    * Credential provider which raises an IOE when constructed.
    */
-  protected static class IOERaisingProvider implements AWSCredentialsProvider {
+  protected static class IOERaisingProvider extends AbstractProvider {
 
     public IOERaisingProvider(URI uri, Configuration conf)
         throws IOException {
       throw new InterruptedIOException("expected");
     }
 
-    @Override
-    public AWSCredentials getCredentials() {
-      return null;
-    }
-
-    @Override
-    public void refresh() {
-
-    }
   }
 
   private static final AwsCredentials EXPECTED_CREDENTIALS =
@@ -538,13 +524,13 @@ public class TestS3AAWSCredentialsProvider {
     Configuration conf = createProviderConfiguration(SlowProvider.class.getName());
     Path testFile = getCSVTestPath(conf);
 
-    AWSCredentialProviderList list = createAWSCredentialProviderSet(testFile.toUri(), conf);
+    AWSCredentialProviderList list = createAWSCredentialProviderList(testFile.toUri(), conf);
 
     SlowProvider provider = (SlowProvider) list.getProviders().get(0);
 
     ExecutorService pool = Executors.newFixedThreadPool(CONCURRENT_THREADS);
 
-    List<Future<AWSCredentials>> results = new ArrayList<>();
+    List<Future<AwsCredentials>> results = new ArrayList<>();
 
     try {
       assertFalse(
@@ -560,15 +546,15 @@ public class TestS3AAWSCredentialsProvider {
       }
 
       for (int i = 0; i < CONCURRENT_THREADS; i++) {
-        results.add(pool.submit(() -> list.getCredentials()));
+        results.add(pool.submit(() -> list.resolveCredentials()));
       }
 
-      for (Future<AWSCredentials> result : results) {
-        AWSCredentials credentials = result.get();
+      for (Future<AwsCredentials> result : results) {
+        AwsCredentials credentials = result.get();
         assertEquals("Access key from credential provider",
-                "expectedAccessKey", credentials.getAWSAccessKeyId());
+            "expectedAccessKey", credentials.accessKeyId());
         assertEquals("Secret key from credential provider",
-                "expectedSecret", credentials.getAWSSecretKey());
+            "expectedSecret", credentials.secretAccessKey());
       }
     } finally {
       pool.awaitTermination(10, TimeUnit.SECONDS);
@@ -577,7 +563,7 @@ public class TestS3AAWSCredentialsProvider {
 
     assertTrue(
         "Provider initialized without errors. isInitialized should be true",
-         provider.isInitialized());
+        provider.isInitialized());
     assertTrue(
         "Provider initialized without errors. hasCredentials should be true",
         provider.hasCredentials());
@@ -608,12 +594,12 @@ public class TestS3AAWSCredentialsProvider {
     Configuration conf = createProviderConfiguration(ErrorProvider.class.getName());
     Path testFile = getCSVTestPath(conf);
 
-    AWSCredentialProviderList list = createAWSCredentialProviderSet(testFile.toUri(), conf);
+    AWSCredentialProviderList list = createAWSCredentialProviderList(testFile.toUri(), conf);
     ErrorProvider provider = (ErrorProvider) list.getProviders().get(0);
 
     ExecutorService pool = Executors.newFixedThreadPool(CONCURRENT_THREADS);
 
-    List<Future<AWSCredentials>> results = new ArrayList<>();
+    List<Future<AwsCredentials>> results = new ArrayList<>();
 
     try {
       assertFalse("Provider not initialized. isInitialized should be false",
@@ -627,10 +613,10 @@ public class TestS3AAWSCredentialsProvider {
       }
 
       for (int i = 0; i < CONCURRENT_THREADS; i++) {
-        results.add(pool.submit(() -> list.getCredentials()));
+        results.add(pool.submit(() -> list.resolveCredentials()));
       }
 
-      for (Future<AWSCredentials> result : results) {
+      for (Future<AwsCredentials> result : results) {
         interceptFuture(CredentialInitializationException.class,
             "expected error",
             result
@@ -651,4 +637,39 @@ public class TestS3AAWSCredentialsProvider {
         "Provider initialization failed. getInitializationException should contain the error",
         provider.getInitializationException().getMessage().contains("expected error"));
   }
+
+
+  /**
+   * V2 Credentials whose factory method raises ClassNotFoundException.
+   * This will fall back to an attempted v1 load which will fail because it
+   * is the wrong type.
+   * The exception raised will be from the v2 instantiation attempt,
+   * not the v1 attempt.
+   */
+  @Test
+  public void testV2ClassNotFound() throws Throwable {
+    InstantiationIOException expected = intercept(InstantiationIOException.class,
+        "simulated v2 CNFE",
+        () -> createAWSCredentialProviderList(
+            TESTFILE_URI,
+            createProviderConfiguration(V2CredentialProviderDoesNotInstantiate.class.getName())));
+    // print for the curious
+    LOG.info("{}", expected.toString());
+  }
+
+  /**
+   * V2 credentials which raises an instantiation exception in
+   * the factory method.
+   */
+  public static final class V2CredentialProviderDoesNotInstantiate
+      extends AbstractProvider {
+
+    private V2CredentialProviderDoesNotInstantiate() {
+    }
+
+    public static AwsCredentialsProvider create() throws ClassNotFoundException {
+      throw new ClassNotFoundException("simulated v2 CNFE");
+    }
+  }
+
 }

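Taken together, these tests pin down the provider instantiation contract: a (URI, Configuration) constructor, a (Configuration) constructor, a static create() factory, or a no-arg constructor, with failures surfaced as InstantiationIOException. A reflection sketch of that contract, illustrative only; the real lookup, and its exact ordering, lives in the S3A credential factory code:

    import java.io.IOException;
    import java.net.URI;

    import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;

    import org.apache.hadoop.conf.Configuration;

    final class ProviderInstantiationSketch {
      static AwsCredentialsProvider instantiate(Class<?> cl, URI uri, Configuration conf)
          throws IOException {
        try {
          try {
            // 1. constructor taking (URI, Configuration), e.g. IOERaisingProvider
            return (AwsCredentialsProvider)
                cl.getConstructor(URI.class, Configuration.class).newInstance(uri, conf);
          } catch (NoSuchMethodException ignored) {
          }
          try {
            // 2. constructor taking (Configuration), e.g. BadCredentialsProvider
            return (AwsCredentialsProvider)
                cl.getConstructor(Configuration.class).newInstance(conf);
          } catch (NoSuchMethodException ignored) {
          }
          try {
            // 3. static create() factory, the v2 SDK convention
            return (AwsCredentialsProvider) cl.getMethod("create").invoke(null);
          } catch (NoSuchMethodException ignored) {
          }
          // 4. public no-arg constructor
          return (AwsCredentialsProvider) cl.getConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
          throw new IOException("Unable to instantiate " + cl.getName(), e);
        }
      }
    }
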
+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java

@@ -75,7 +75,7 @@ public class TestS3ADeleteOnExit extends AbstractS3AMockTest {
     // unset S3CSE property from config to avoid pathIOE.
     conf.unset(Constants.S3_ENCRYPTION_ALGORITHM);
     testFs.initialize(uri, conf);
-    S3Client testS3 = testFs.getAmazonS3ClientForTesting("mocking");
+    S3Client testS3 = testFs.getS3AInternals().getAmazonS3V2ClientForTesting("mocking");
 
     Path path = new Path("/file");
     String key = path.toUri().getPath().substring(1);

+ 222 - 0
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/adapter/TestV1CredentialsProvider.java

@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.adapter;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSCredentialsProvider;
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
+import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;
+import org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider;
+import org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider;
+import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
+
+import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
+import static org.apache.hadoop.fs.s3a.S3ATestConstants.DEFAULT_CSVTEST_FILE;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.ANONYMOUS_CREDENTIALS_V1;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.EC2_CONTAINER_CREDENTIALS_V1;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.ENVIRONMENT_CREDENTIALS_V1;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.createAWSCredentialProviderList;
+import static org.apache.hadoop.test.LambdaTestUtils.intercept;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Unit tests for v1 to v2 credential provider logic.
+ */
+public class TestV1CredentialsProvider {
+
+  /**
+   * URI of the Landsat images.
+   */
+  private static final URI TESTFILE_URI = new Path(
+      DEFAULT_CSVTEST_FILE).toUri();
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestV1CredentialsProvider.class);
+
+
+  @Test
+  public void testV1V2Mapping() throws Exception {
+    URI uri1 = new URI("s3a://bucket1");
+
+    List<Class<?>> expectedClasses =
+        Arrays.asList(
+            IAMInstanceCredentialsProvider.class,
+            AnonymousAWSCredentialsProvider.class,
+            EnvironmentVariableCredentialsProvider.class);
+    Configuration conf =
+        createProviderConfiguration(buildClassList(
+            EC2_CONTAINER_CREDENTIALS_V1,
+            ANONYMOUS_CREDENTIALS_V1,
+            ENVIRONMENT_CREDENTIALS_V1));
+    AWSCredentialProviderList list1 = createAWSCredentialProviderList(
+        uri1, conf);
+    assertCredentialProviders(expectedClasses, list1);
+  }
+
+  @Test
+  public void testV1Wrapping() throws Exception {
+    URI uri1 = new URI("s3a://bucket1");
+
+    List<Class<?>> expectedClasses =
+        Arrays.asList(
+            V1ToV2AwsCredentialProviderAdapter.class,
+            V1ToV2AwsCredentialProviderAdapter.class);
+    Configuration conf =
+        createProviderConfiguration(buildClassList(
+            LegacyV1CredentialProvider.class.getName(),
+            LegacyV1CredentialProviderWithConf.class.getName()));
+    AWSCredentialProviderList list1 = createAWSCredentialProviderList(
+        uri1, conf);
+    assertCredentialProviders(expectedClasses, list1);
+  }
+
+  private String buildClassList(String... classes) {
+    return Arrays.stream(classes)
+        .collect(Collectors.joining(","));
+  }
+
+
+  /**
+   * Expect a provider to raise an exception on failure.
+   * @param option aws provider option string.
+   * @param expectedErrorText error text to expect
+   * @return the exception raised
+   * @throws Exception any unexpected exception thrown.
+   */
+  private IOException expectProviderInstantiationFailure(String option,
+      String expectedErrorText) throws Exception {
+    return intercept(IOException.class, expectedErrorText,
+        () -> createAWSCredentialProviderList(
+            TESTFILE_URI,
+            createProviderConfiguration(option)));
+  }
+
+  /**
+   * Create a configuration with a specific provider.
+   * @param providerOption option for the aws credential provider option.
+   * @return a configuration to use in test cases
+   */
+  private Configuration createProviderConfiguration(
+      final String providerOption) {
+    Configuration conf = new Configuration(false);
+    conf.set(AWS_CREDENTIALS_PROVIDER, providerOption);
+    return conf;
+  }
+
+  /**
+   * Asserts expected provider classes in list.
+   * @param expectedClasses expected provider classes
+   * @param list providers to check
+   */
+  private static void assertCredentialProviders(
+      List<Class<?>> expectedClasses,
+      AWSCredentialProviderList list) {
+    assertNotNull(list);
+    List<AwsCredentialsProvider> providers = list.getProviders();
+    Assertions.assertThat(providers)
+        .describedAs("providers")
+        .hasSize(expectedClasses.size());
+    for (int i = 0; i < expectedClasses.size(); ++i) {
+      Class<?> expectedClass =
+          expectedClasses.get(i);
+      AwsCredentialsProvider provider = providers.get(i);
+      assertNotNull(
+          String.format("At position %d, expected class is %s, but found null.",
+              i, expectedClass), provider);
+      assertTrue(
+          String.format("At position %d, expected class is %s, but found %s.",
+              i, expectedClass, provider.getClass()),
+          expectedClass.isAssignableFrom(provider.getClass()));
+    }
+  }
+
+
+  public static class LegacyV1CredentialProvider implements AWSCredentialsProvider {
+
+    public LegacyV1CredentialProvider() {
+    }
+
+    @Override
+    public AWSCredentials getCredentials() {
+      return null;
+    }
+
+    @Override
+    public void refresh() {
+
+    }
+  }
+
+  /**
+   * V1 credentials with a configuration constructor.
+   */
+  public static final class LegacyV1CredentialProviderWithConf
+      extends LegacyV1CredentialProvider {
+
+    public LegacyV1CredentialProviderWithConf(Configuration conf) {
+    }
+  }
+
+  /**
+   * V1 Credentials whose factory method raises ClassNotFoundException.
+   * Expect this to fail rather than trigger recursive recovery;
+   * exception will be wrapped with something intended to be informative.
+   */
+  @Test
+  public void testV1InstantiationFailurePropagation() throws Throwable {
+    InstantiationIOException expected = intercept(InstantiationIOException.class,
+        "simulated CNFE",
+        () -> createAWSCredentialProviderList(
+            TESTFILE_URI,
+            createProviderConfiguration(V1CredentialProviderDoesNotInstantiate.class.getName())));
+    // print for the curious
+    LOG.info("{}", expected.toString());
+  }
+
+
+  /**
+   * V1 credentials which raises an instantiation exception.
+   */
+  public static final class V1CredentialProviderDoesNotInstantiate
+      extends LegacyV1CredentialProvider {
+
+    private V1CredentialProviderDoesNotInstantiate() {
+    }
+
+    public static AWSCredentialsProvider getInstance() throws ClassNotFoundException {
+      throw new ClassNotFoundException("simulated CNFE");
+    }
+  }
+
+
+}

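The mapping tests above show the two paths for legacy configurations: testV1V2Mapping demonstrates well-known v1 classnames being translated to their v2 equivalents before instantiation, while testV1Wrapping shows unrecognized v1 implementations being wrapped by V1ToV2AwsCredentialProviderAdapter. A sketch of the first path, using the v1 environment-variable provider name and assumed to run inside a test method declared throws Exception:

    Configuration conf = new Configuration(false);
    // a configuration still naming the v1 SDK provider class...
    conf.set("fs.s3a.aws.credentials.provider",
        "com.amazonaws.auth.EnvironmentVariableCredentialsProvider");
    AWSCredentialProviderList list =
        CredentialProviderListFactory.createAWSCredentialProviderList(
            new URI("s3a://bucket1"), conf);
    // ...resolves to the v2 EnvironmentVariableCredentialsProvider, no adapter needed
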
+ 0 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/AbstractAuditingTest.java

@@ -25,7 +25,6 @@ import java.net.URI;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.function.Consumer;
 import java.util.stream.Collectors;
 
 

+ 8 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/ITestAuditManager.java

@@ -34,6 +34,7 @@ import static org.apache.hadoop.fs.s3a.Statistic.AUDIT_REQUEST_EXECUTION;
 import static org.apache.hadoop.fs.s3a.audit.AuditTestSupport.enableLoggingAuditor;
 import static org.apache.hadoop.fs.s3a.audit.AuditTestSupport.resetAuditOptions;
 import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.AUDIT_EXECUTION_INTERCEPTORS;
+import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.AUDIT_REQUEST_HANDLERS;
 import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.UNAUDITED_OPERATION;
 import static org.apache.hadoop.fs.statistics.IOStatisticAssertions.assertThatStatisticCounter;
 import static org.apache.hadoop.fs.statistics.IOStatisticAssertions.lookupCounterStatistic;
@@ -59,6 +60,7 @@ public class ITestAuditManager extends AbstractS3ACostTest {
     enableLoggingAuditor(conf);
     conf.set(AUDIT_EXECUTION_INTERCEPTORS,
         SimpleAWSExecutionInterceptor.CLASS);
+    conf.set(AUDIT_REQUEST_HANDLERS, "not-valid-class");
     return conf;
   }
 
@@ -114,8 +116,8 @@ public class ITestAuditManager extends AbstractS3ACostTest {
   }
 
   @Test
-  public void testRequestHandlerBinding() throws Throwable {
-    describe("Verify that extra request handlers can be added and that they"
+  public void testExecutionInterceptorBinding() throws Throwable {
+    describe("Verify that extra ExecutionInterceptor can be added and that they"
         + " will be invoked during request execution");
     final long baseCount = SimpleAWSExecutionInterceptor.getInvocationCount();
     final S3AFileSystem fs = getFileSystem();
@@ -131,5 +133,9 @@ public class ITestAuditManager extends AbstractS3ACostTest {
         .isGreaterThan(exec0);
     assertThatStatisticCounter(iostats(), AUDIT_FAILURE.getSymbol())
         .isZero();
+    Assertions.assertThat(SimpleAWSExecutionInterceptor.getStaticConf())
+        .describedAs("configuratin of SimpleAWSExecutionInterceptor")
+        .isNotNull()
+        .isSameAs(fs.getConf());
   }
 }

+ 18 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/SimpleAWSExecutionInterceptor.java

@@ -24,17 +24,23 @@ import software.amazon.awssdk.core.interceptor.Context;
 import software.amazon.awssdk.core.interceptor.ExecutionAttributes;
 import software.amazon.awssdk.core.interceptor.ExecutionInterceptor;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+
 /**
  * Simple AWS interceptor to verify dynamic loading of extra
  * execution interceptors during auditing setup.
  * The invocation counter tracks the count of calls to
  * {@link #beforeExecution}.
  */
-public final class SimpleAWSExecutionInterceptor implements ExecutionInterceptor {
+public final class SimpleAWSExecutionInterceptor extends Configured
+    implements ExecutionInterceptor {
 
   public static final String CLASS
       = "org.apache.hadoop.fs.s3a.audit.SimpleAWSExecutionInterceptor";
 
+  private static Configuration staticConf;
+
   /** Count of invocations. */
   private static final AtomicLong INVOCATIONS = new AtomicLong(0);
 
@@ -42,6 +48,7 @@ public final class SimpleAWSExecutionInterceptor implements ExecutionInterceptor
   public void beforeExecution(Context.BeforeExecution context,
       ExecutionAttributes executionAttributes) {
     INVOCATIONS.incrementAndGet();
+    staticConf = getConf();
   }
 
   /**
@@ -51,4 +58,14 @@ public final class SimpleAWSExecutionInterceptor implements ExecutionInterceptor
   public static long getInvocationCount() {
     return INVOCATIONS.get();
   }
+
+  /**
+   * Get the static conf, which is set to the config of the
+   * last interceptor invoked.
+   * @return the static configuration.
+   */
+  public static Configuration getStaticConf() {
+    return staticConf;
+  }
 }

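Interceptors like this are loaded through the audit options, as ITestAuditManager configures above; an implementation extending Configured is handed the filesystem configuration, which the test verifies via getStaticConf(). A minimal wiring sketch, assuming "fs.s3a.audit.execution.interceptors" is the key behind AUDIT_EXECUTION_INTERCEPTORS:

    Configuration conf = new Configuration();
    // comma-separated list of ExecutionInterceptor classes to instantiate
    conf.set("fs.s3a.audit.execution.interceptors",
        SimpleAWSExecutionInterceptor.CLASS);
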
+ 9 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java

@@ -52,13 +52,14 @@ import org.apache.hadoop.fs.s3a.commit.files.PendingSet;
 import org.apache.hadoop.fs.s3a.commit.files.SinglePendingCommit;
 import org.apache.hadoop.fs.s3a.commit.impl.CommitContext;
 import org.apache.hadoop.fs.s3a.commit.impl.CommitOperations;
+import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
 import org.apache.hadoop.fs.s3a.s3guard.S3GuardTool;
 import org.apache.hadoop.fs.s3a.statistics.CommitterStatistics;
 
 import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
 import static org.apache.hadoop.fs.s3a.Constants.*;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
-import static org.apache.hadoop.fs.s3a.auth.AwsCredentialListProvider.E_FORBIDDEN_AWS_PROVIDER;
+import static org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory.E_FORBIDDEN_AWS_PROVIDER;
 import static org.apache.hadoop.fs.s3a.auth.RoleTestUtils.*;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.*;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.*;
@@ -189,7 +190,12 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     conf.set(ASSUMED_ROLE_ARN, ROLE_ARN_EXAMPLE);
     interceptClosing(StsException.class,
         "",
-        () -> new AssumedRoleCredentialProvider(uri, conf));
+        () -> {
+          AssumedRoleCredentialProvider p =
+              new AssumedRoleCredentialProvider(uri, conf);
+          p.resolveCredentials();
+          return p;
+        });
   }
 
   @Test
@@ -241,7 +247,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     conf.set(ASSUMED_ROLE_CREDENTIALS_PROVIDER,
         AssumedRoleCredentialProvider.NAME);
     expectFileSystemCreateFailure(conf,
-        IOException.class,
+        InstantiationIOException.class,
         E_FORBIDDEN_AWS_PROVIDER);
   }
 

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestCustomSigner.java

@@ -152,7 +152,7 @@ public class ITestCustomSigner extends AbstractS3ATestBase {
   }
 
   private String determineRegion(String bucketName) throws IOException {
-    return getFileSystem().getBucketLocation(bucketName);
+    return getS3AInternals().getBucketLocation(bucketName);
   }
 
   @Private

+ 5 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/RoleTestUtils.java

@@ -151,13 +151,17 @@ public final class RoleTestUtils {
       final String roleARN) {
     Configuration conf = new Configuration(srcConf);
     removeBaseAndBucketOverrides(conf,
+        S3A_BUCKET_PROBE,
         DELEGATION_TOKEN_BINDING,
         ASSUMED_ROLE_ARN,
-        AWS_CREDENTIALS_PROVIDER);
+        AWS_CREDENTIALS_PROVIDER,
+        ASSUMED_ROLE_SESSION_DURATION);
     conf.set(AWS_CREDENTIALS_PROVIDER, AssumedRoleCredentialProvider.NAME);
     conf.set(ASSUMED_ROLE_ARN, roleARN);
     conf.set(ASSUMED_ROLE_SESSION_NAME, "test");
     conf.set(ASSUMED_ROLE_SESSION_DURATION, "15m");
+    // force in bucket resolution during startup
+    conf.setInt(S3A_BUCKET_PROBE, 1);
     disableFilesystemCaching(conf);
     return conf;
   }

+ 25 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/CountInvocationsProvider.java

@@ -20,6 +20,8 @@ package org.apache.hadoop.fs.s3a.auth.delegation;
 
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import software.amazon.awssdk.auth.credentials.AwsCredentials;
 import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
 
@@ -31,14 +33,35 @@ import org.apache.hadoop.fs.s3a.CredentialInitializationException;
 public class CountInvocationsProvider
     implements AwsCredentialsProvider {
 
+  private static final Logger LOG = LoggerFactory.getLogger(
+      CountInvocationsProvider.class);
+
   public static final String NAME = CountInvocationsProvider.class.getName();
 
   public static final AtomicLong COUNTER = new AtomicLong(0);
 
+  private final AtomicLong instanceCounter = new AtomicLong(0);
+
   @Override
   public AwsCredentials resolveCredentials() {
-    COUNTER.incrementAndGet();
-    throw new CredentialInitializationException("no credentials");
+    final long global = COUNTER.incrementAndGet();
+    final long local = instanceCounter.incrementAndGet();
+    final String msg =
+        String.format("counter with global count %d and local count %d", global, local);
+    LOG.debug("resolving credentials from {}", msg);
+    throw new CredentialInitializationException("no credentials from " + msg);
+  }
+
+  public long getInstanceCounter() {
+    return instanceCounter.get();
+  }
+
+  @Override
+  public String toString() {
+    return "CountInvocationsProvider{" +
+        "instanceCounter=" + instanceCounter.get() +
+        "; global counter=" + COUNTER.get() +
+        '}';
   }
 
   public static long getInvocationCount() {

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFilesystem.java

@@ -582,7 +582,7 @@ public class ITestSessionDelegationInFilesystem extends AbstractDelegationIT {
   protected HeadBucketResponse readLandsatMetadata(final S3AFileSystem delegatedFS)
       throws Exception {
     AWSCredentialProviderList testingCreds
-        = delegatedFS.shareCredentials("testing");
+        = delegatedFS.getS3AInternals().shareCredentials("testing");
 
     URI landsat = new URI(DEFAULT_CSVTEST_FILE);
     DefaultS3ClientFactory factory

+ 5 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationTokens.java

@@ -186,11 +186,15 @@ public class ITestSessionDelegationTokens extends AbstractDelegationIT {
     final MarshalledCredentials creds;
     try(S3ADelegationTokens dt2 = instantiateDTSupport(getConfiguration())) {
       dt2.start();
+      // first creds are good
+      dt2.getCredentialProviders().resolveCredentials();
+
+      // reset to the original dt
 
       dt2.resetTokenBindingToDT(originalDT);
       final AwsSessionCredentials awsSessionCreds
           = verifySessionCredentials(
-          dt2.getCredentialProviders().resolveCredentials());
+              dt2.getCredentialProviders().resolveCredentials());
       final MarshalledCredentials origCreds = fromAWSCredentials(
           awsSessionCreds);
 

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java

@@ -741,8 +741,8 @@ public abstract class AbstractITCommitProtocol extends AbstractCommitITest {
    */
   private void validateStorageClass(Path dir, String expectedStorageClass) throws Exception {
     Path expectedFile = getPart0000(dir);
-    S3AFileSystem fs = getFileSystem();
-    String actualStorageClass = fs.getObjectMetadata(expectedFile).storageClassAsString();
+    String actualStorageClass = getS3AInternals().getObjectMetadata(expectedFile)
+        .storageClassAsString();
 
     Assertions.assertThat(actualStorageClass)
         .describedAs("Storage class of object %s", expectedFile)

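getObjectMetadata(Path) has moved from S3AFileSystem onto S3AInternals and now returns the v2 SDK HeadObjectResponse, which is why the storage class is read with storageClassAsString(). A hedged sketch of the new call shape:

    // HEAD the object via the internal API; response types are v2 SDK classes
    HeadObjectResponse md = getS3AInternals().getObjectMetadata(expectedFile);
    String storageClass = md.storageClassAsString();
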
+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestDirectoryMarkerListing.java

@@ -214,7 +214,7 @@ public class ITestDirectoryMarkerListing extends AbstractS3ATestBase {
   public void setup() throws Exception {
     super.setup();
     S3AFileSystem fs = getFileSystem();
-    s3client = fs.getAmazonS3ClientForTesting("markers");
+    s3client = getS3AInternals().getAmazonS3V2ClientForTesting("markers");
     bucket = fs.getBucket();
     Path base = new Path(methodPath(), "base");
 

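The raw client accessor is also behind S3AInternals now, returning the v2 S3Client; the string argument is only a diagnostic label. An illustrative follow-on call, using the v2 builder-consumer style:

    // fetch the underlying v2 client for operations the FS API does not expose
    S3Client s3client = getS3AInternals().getAmazonS3V2ClientForTesting("markers");
    HeadBucketResponse status =
        s3client.headBucket(b -> b.bucket(fs.getBucket()));
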
+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesStorageClass.java

@@ -125,8 +125,8 @@ public class ITestS3AHugeFilesStorageClass extends AbstractSTestS3AHugeFiles {
   }
 
   protected void assertStorageClass(Path hugeFile) throws IOException {
-    S3AFileSystem fs = getFileSystem();
-    String actual = fs.getObjectMetadata(hugeFile).storageClassAsString();
+
+    String actual = getS3AInternals().getObjectMetadata(hugeFile).storageClassAsString();
 
     assertTrue(
         "Storage class of object is " + actual + ", expected " + STORAGE_CLASS_REDUCED_REDUNDANCY,

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/StreamPublisher.java

@@ -35,12 +35,12 @@ final class StreamPublisher<T> implements SdkPublisher<T> {
   private final Iterator<T> iterator;
   private Boolean done = false;
 
-  public StreamPublisher(Stream<T> data, Executor executor) {
+  StreamPublisher(Stream<T> data, Executor executor) {
     this.iterator = data.iterator();
     this.executor = executor;
   }
 
-  public StreamPublisher(Stream<T> data) {
+  StreamPublisher(Stream<T> data) {
     this(data, Runnable::run);
   }
 

+ 3 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/TestSelectEventStreamPublisher.java

@@ -156,7 +156,9 @@ public final class TestSelectEventStreamPublisher extends Assert {
         SelectObjectContentEventStream.recordsBuilder()
             .payload(SdkBytes.fromUtf8String("bar"))
             .build())
-        .map(e -> { throw SdkException.create("error!", null); }));
+        .map(e -> {
+          throw SdkException.create("error!", null);
+        }));
 
     try (AbortableInputStream inputStream =
         selectEventStreamPublisher.toRecordsInputStream(e -> {})) {

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/statistics/ITestAWSStatisticCollection.java

@@ -53,7 +53,7 @@ public class ITestAWSStatisticCollection extends AbstractS3ATestBase {
     conf.unset("fs.s3a.bucket.landsat-pds.endpoint");
 
     try (S3AFileSystem fs = (S3AFileSystem) path.getFileSystem(conf)) {
-      fs.getObjectMetadata(path);
+      fs.getS3AInternals().getObjectMetadata(path);
       IOStatistics iostats = fs.getIOStatistics();
       assertThatStatisticCounter(iostats,
           STORE_IO_REQUEST.getSymbol())
@@ -71,7 +71,7 @@ public class ITestAWSStatisticCollection extends AbstractS3ATestBase {
     conf.set(ENDPOINT, DEFAULT_ENDPOINT);
 
     try (S3AFileSystem fs = (S3AFileSystem) path.getFileSystem(conf)) {
-      fs.getObjectMetadata(path);
+      fs.getS3AInternals().getObjectMetadata(path);
       IOStatistics iostats = fs.getIOStatistics();
       assertThatStatisticCounter(iostats,
           STORE_IO_REQUEST.getSymbol())
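
Even when the HEAD request goes through S3AInternals, it is executed by the owning filesystem, so the FS IOStatistics still record it; that is what both hunks assert. A condensed sketch of the check (the threshold of 1 is illustrative):

    try (S3AFileSystem fs = (S3AFileSystem) path.getFileSystem(conf)) {
      fs.getS3AInternals().getObjectMetadata(path);  // one HEAD against the store
      assertThatStatisticCounter(fs.getIOStatistics(), STORE_IO_REQUEST.getSymbol())
          .isGreaterThanOrEqualTo(1);
    }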