
HADOOP-18939. NPE in AWS v2 SDK RetryOnErrorCodeCondition.shouldRetry() (#6193)

MultiObjectDeleteException now fills in the error details expected by the SDK retry conditions

See also: https://github.com/aws/aws-sdk-java-v2/issues/4600

Contributed by Steve Loughran
Steve Loughran committed 1 year ago, commit b7fee114e6
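
For context, the linked SDK issue describes the failure mode: RetryOnErrorCodeCondition.shouldRetry() reads the error code out of the exception's AwsErrorDetails, and MultiObjectDeleteException never set them, so the lookup dereferenced null. Below is a minimal reproduction sketch of that pre-fix shape, using only the SDK types already referenced in the patch; the class name MultiObjectDeleteNpeRepro is invented for illustration and the snippet is not part of the commit.

import software.amazon.awssdk.awscore.retry.conditions.RetryOnErrorCodeCondition;
import software.amazon.awssdk.core.retry.RetryPolicyContext;
import software.amazon.awssdk.services.s3.model.S3Exception;

/** Illustrative repro of HADOOP-18939; not part of the patch. */
public class MultiObjectDeleteNpeRepro {
  public static void main(String[] args) {
    // The pre-fix shape of MultiObjectDeleteException: a message and a
    // status code, but no AwsErrorDetails.
    S3Exception preFix = S3Exception.builder()
        .message("partial delete failure")
        .statusCode(200)
        .build();

    RetryPolicyContext context = RetryPolicyContext.builder()
        .exception(preFix)
        .build();

    // Before the fix this call raised NullPointerException, because the
    // condition asks the exception for awsErrorDetails().errorCode().
    RetryOnErrorCodeCondition.create("").shouldRetry(context);
  }
}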

+ 32 - 1
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/MultiObjectDeleteException.java

@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.nio.file.AccessDeniedException;
 import java.util.List;
 
+import software.amazon.awssdk.awscore.exception.AwsErrorDetails;
+import software.amazon.awssdk.http.SdkHttpResponse;
 import software.amazon.awssdk.services.s3.model.S3Error;
 import software.amazon.awssdk.services.s3.model.S3Exception;
 import org.slf4j.Logger;
@@ -55,10 +57,39 @@ public class MultiObjectDeleteException extends S3Exception {
    */
   public static final String ACCESS_DENIED = "AccessDenied";
 
+  /**
+   * Field value for the superclass builder: {@value}.
+   */
+  private static final int STATUS_CODE = SC_200_OK;
+
+  /**
+   * Field value for the superclass builder: {@value}.
+   */
+  private static final String ERROR_CODE = "MultiObjectDeleteException";
+
+  /**
+   * Field value for the superclass builder: {@value}.
+   */
+  private static final String SERVICE_NAME = "Amazon S3";
+
+  /**
+   * Extracted error list.
+   */
   private final List<S3Error> errors;
 
   public MultiObjectDeleteException(List<S3Error> errors) {
-    super(builder().message(errors.toString()).statusCode(SC_200_OK));
+    super(builder()
+        .message(errors.toString())
+        .awsErrorDetails(
+            AwsErrorDetails.builder()
+                .errorCode(ERROR_CODE)
+                .errorMessage(ERROR_CODE)
+                .serviceName(SERVICE_NAME)
+                .sdkHttpResponse(SdkHttpResponse.builder()
+                    .statusCode(STATUS_CODE)
+                    .build())
+                .build())
+        .statusCode(STATUS_CODE));
     this.errors = errors;
   }
 

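With the builder now supplying AwsErrorDetails and a wrapped SdkHttpResponse, the error code, service name and HTTP status can be read straight off any MultiObjectDeleteException. A quick illustrative check of the new shape follows; the class name DetailsCheck is invented for the example, and the expected values simply follow from the constants above. This snippet is not part of the patch.

import java.util.Collections;

import software.amazon.awssdk.awscore.exception.AwsErrorDetails;

import org.apache.hadoop.fs.s3a.impl.MultiObjectDeleteException;

/** Illustrative check of the filled-in details; not part of the patch. */
public class DetailsCheck {
  public static void main(String[] args) {
    MultiObjectDeleteException ex =
        new MultiObjectDeleteException(Collections.emptyList());

    // The constructor now populates the error details alongside the status code.
    AwsErrorDetails details = ex.awsErrorDetails();
    System.out.println(details.errorCode());                    // MultiObjectDeleteException
    System.out.println(details.serviceName());                  // Amazon S3
    System.out.println(details.sdkHttpResponse().statusCode()); // 200
    System.out.println(ex.statusCode());                        // 200
  }
}
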
+ 21 - 0
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestErrorTranslation.java

@@ -22,15 +22,19 @@ import java.io.IOException;
 import java.net.ConnectException;
 import java.net.NoRouteToHostException;
 import java.net.UnknownHostException;
+import java.util.Collections;
 
+import org.assertj.core.api.Assertions;
 import org.junit.Test;
+import software.amazon.awssdk.awscore.retry.conditions.RetryOnErrorCodeCondition;
 import software.amazon.awssdk.core.exception.SdkClientException;
+import software.amazon.awssdk.core.retry.RetryPolicyContext;
 
 import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.test.AbstractHadoopTestBase;
 
 import static org.apache.hadoop.fs.s3a.impl.ErrorTranslation.maybeExtractNetworkException;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 
 /**
  * Unit tests related to the {@link ErrorTranslation} class.
@@ -112,4 +116,21 @@ public class TestErrorTranslation extends AbstractHadoopTestBase {
     }
   }
 
+
+  @Test
+  public void testMultiObjectExceptionFilledIn() throws Throwable {
+
+    MultiObjectDeleteException ase =
+        new MultiObjectDeleteException(Collections.emptyList());
+    RetryPolicyContext context = RetryPolicyContext.builder()
+        .exception(ase)
+        .build();
+    RetryOnErrorCodeCondition retry = RetryOnErrorCodeCondition.create("");
+
+    // shouldRetry() must not NPE now that the error details are filled in;
+    // a non-matching error code simply means "do not retry".
+    Assertions.assertThat(retry.shouldRetry(context))
+        .describedAs("retry policy of MultiObjectException")
+        .isFalse();
+  }
 }
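
As a complementary sketch (not part of the commit), the same condition should now ask for a retry when it is configured with the exception's own error code, since the lookup no longer hits a null; the expected true/false values are an assumption based on how RetryOnErrorCodeCondition matches error codes, and the class name RetryBehaviourSketch is invented for the example.

import java.util.Collections;

import software.amazon.awssdk.awscore.retry.conditions.RetryOnErrorCodeCondition;
import software.amazon.awssdk.core.retry.RetryPolicyContext;

import org.apache.hadoop.fs.s3a.impl.MultiObjectDeleteException;

/** Illustrative sketch of the post-fix retry behaviour; not part of the patch. */
public class RetryBehaviourSketch {
  public static void main(String[] args) {
    RetryPolicyContext context = RetryPolicyContext.builder()
        .exception(new MultiObjectDeleteException(Collections.emptyList()))
        .build();

    // Non-matching error code: no retry, and (the point of the fix) no NPE.
    System.out.println(
        RetryOnErrorCodeCondition.create("").shouldRetry(context));                           // false

    // Matching error code: the condition can now see it and requests a retry.
    System.out.println(
        RetryOnErrorCodeCondition.create("MultiObjectDeleteException").shouldRetry(context)); // true
  }
}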