
Revert "HADOOP-19492. S3A: Some tests failing on third-party stores"

This reverts commit 2dd658252bd2ec9831c0984823e173fca11f9051.
Steve Loughran, 1 month ago
commit 08ef3c477a

+ 2 - 13
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/third_party_stores.md

@@ -40,7 +40,6 @@ The features which may be unavailable include:
   This is now the default -do not change it.
 * List API to use (`fs.s3a.list.version = 1`)
 * Bucket lifecycle rules to clean up pending uploads.
-* Support for multipart uploads.
 
 ### Disabling Change Detection
 
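Where a store lacks one of the features in that list, the matching option can be tuned per bucket. A minimal illustrative sketch, using the option names quoted above and a placeholder bucket name `third-party-bucket`:

```java
import org.apache.hadoop.conf.Configuration;

/**
 * Illustrative only: per-bucket overrides for a store missing some of the
 * features listed above. The bucket name is a placeholder.
 */
public class ThirdPartyStoreOptions {
  public static Configuration tuned() {
    Configuration conf = new Configuration();
    // fall back to the V1 list API if the store does not support V2
    conf.setInt("fs.s3a.bucket.third-party-bucket.list.version", 1);
    // switch off multipart uploads where the store rejects them
    conf.setBoolean("fs.s3a.bucket.third-party-bucket.multipart.uploads.enabled", false);
    return conf;
  }
}
```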
@@ -410,7 +409,7 @@ which is a subset of the AWS API.
 To get a compatible access and secret key, follow the instructions of
 [Simple migration from Amazon S3 to Cloud Storage](https://cloud.google.com/storage/docs/aws-simple-migration#defaultproj).
 
-Here are the per-bucket settings for an example bucket "gcs-container"
+Here are the per-bucket setings for an example bucket "gcs-container"
 in Google Cloud Storage. Note the multiobject delete option must be disabled;
 this makes renaming and deleting significantly slower.
 
@@ -453,21 +452,11 @@ this makes renaming and deleting significantly slower.
     <value>true</value>
   </property>
 
-  <!-- any value is allowed here, using "gcs" is more informative -->
   <property>
     <name>fs.s3a.bucket.gcs-container.endpoint.region</name>
-    <value>gcs</value>
+    <value>dummy</value>
   </property>
 
-  <!-- multipart uploads trigger 400 response-->
-  <property>
-    <name>fs.s3a.multipart.uploads.enabled</name>
-    <value>false</value>
-  </property>
-    <property>
-    <name>fs.s3a.optimized.copy.from.local.enabled</name>
-    <value>false</value>
-  </property>
 </configuration>
 ```
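The same per-bucket settings can be applied programmatically. A hedged Java sketch of that: the `storage.googleapis.com` endpoint and the path-style flag are assumptions here (they are set earlier in the full example, not in this hunk), while the `dummy` region value matches the property above.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Sketch only; credentials for the bucket must already be configured. */
public class GcsContainerExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String prefix = "fs.s3a.bucket.gcs-container.";
    conf.set(prefix + "endpoint", "https://storage.googleapis.com");  // assumed GCS endpoint
    conf.set(prefix + "endpoint.region", "dummy");
    // multiobject delete must be disabled for GCS; renames and deletes get slower
    conf.setBoolean(prefix + "multiobjectdelete.enable", false);
    conf.setBoolean(prefix + "path.style.access", true);              // assumed, see full document
    Path root = new Path("s3a://gcs-container/");
    try (FileSystem fs = FileSystem.newInstance(root.toUri(), conf)) {
      fs.listStatus(root);  // simple smoke test of the binding
    }
  }
}
```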
 

+ 1 - 1
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md

@@ -1392,7 +1392,7 @@ software.amazon.awssdk.metrics.LoggingMetricPublisher
 ```
 
 ```text
-INFO  metrics.LoggingMetricPublisher (LoggerAdapter.java:info(165)) - Metrics published:
+INFO  metrics.LoggingMetricPublisher (LoggerAdapter.java:info(165)) - Metrics published: 
 MetricCollection(name=ApiCall, metrics=[
 MetricRecord(metric=MarshallingDuration, value=PT0.000092041S),
 MetricRecord(metric=RetryCount, value=0),

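For context, those log lines come from the AWS SDK v2's own metric publisher. A minimal SDK-level sketch, not the S3A wiring itself, showing how `LoggingMetricPublisher` can be attached to a client so that each API call emits a `MetricCollection` like the excerpt above:

```java
import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
import software.amazon.awssdk.metrics.LoggingMetricPublisher;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;

/** Standalone sketch; assumes credentials are available from the default chain. */
public class MetricLoggingExample {
  public static void main(String[] args) {
    try (S3Client s3 = S3Client.builder()
        .region(Region.EU_WEST_1)
        .overrideConfiguration(ClientOverrideConfiguration.builder()
            .addMetricPublisher(LoggingMetricPublisher.create())
            .build())
        .build()) {
      // Each request now publishes an ApiCall metric collection at INFO level.
      s3.listBuckets();
    }
  }
}
```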
+ 0 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java

@@ -589,8 +589,6 @@ public class ITestS3AConfiguration extends AbstractHadoopTestBase {
     config.set(AWS_REGION, EU_WEST_1);
     disableFilesystemCaching(config);
     fs = S3ATestUtils.createTestFileSystem(config);
-    assumeStoreAwsHosted(fs);
-
 
     S3Client s3Client = getS3Client("testS3SpecificSignerOverride");
 

+ 0 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java

@@ -39,7 +39,6 @@ import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
 
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.assumeStoreAwsHosted;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.maybeSkipRootTests;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
@@ -102,7 +101,6 @@ public class ITestS3AEncryptionSSEC extends AbstractTestS3AEncryption {
     // although not a root dir test, this confuses paths enough it shouldn't be run in
     // parallel with other jobs
     maybeSkipRootTests(getConfiguration());
-    assumeStoreAwsHosted(getFileSystem());
   }
 
   @Override

+ 0 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java

@@ -55,7 +55,6 @@ import static org.apache.hadoop.fs.s3a.Constants.PATH_STYLE_ACCESS;
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
 import static org.apache.hadoop.fs.s3a.DefaultS3ClientFactory.ERROR_ENDPOINT_WITH_FIPS;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.assume;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.assumeStoreAwsHosted;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
 import static org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils.DEFAULT_REQUESTER_PAYS_BUCKET_NAME;
 import static org.apache.hadoop.io.IOUtils.closeStream;
@@ -482,7 +481,6 @@ public class ITestS3AEndpointRegion extends AbstractS3ATestBase {
     describe("Access the test bucket using central endpoint and"
         + " null region, perform file system CRUD operations");
     final Configuration conf = getConfiguration();
-    assumeStoreAwsHosted(getFileSystem());
 
     final Configuration newConf = new Configuration(conf);
 
@@ -505,7 +503,6 @@ public class ITestS3AEndpointRegion extends AbstractS3ATestBase {
   public void testCentralEndpointAndNullRegionFipsWithCRUD() throws Throwable {
     describe("Access the test bucket using central endpoint and"
         + " null region and fips enabled, perform file system CRUD operations");
-    assumeStoreAwsHosted(getFileSystem());
 
     final String bucketLocation = getFileSystem().getBucketLocation();
     assume("FIPS can be enabled to access buckets from US or Canada endpoints only",

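Outside the test suite, the scenario these tests describe looks roughly like the sketch below: point the client at the central endpoint (assumed here to be `s3.amazonaws.com`), leave the region unset, then run basic CRUD. The bucket name is a placeholder.

```java
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Hypothetical walk-through of central-endpoint, null-region CRUD. */
public class CentralEndpointCrud {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("fs.s3a.endpoint", "s3.amazonaws.com");  // central endpoint
    conf.unset("fs.s3a.endpoint.region");             // null region: let the client resolve it
    Path base = new Path("s3a://example-bucket/test/centralEndpoint/");
    try (FileSystem fs = FileSystem.newInstance(base.toUri(), conf)) {
      Path file = new Path(base, "file.txt");
      try (FSDataOutputStream out = fs.create(file, true)) {
        out.write("hello".getBytes(StandardCharsets.UTF_8));
      }
      fs.getFileStatus(file);   // read back the metadata
      fs.delete(base, true);    // clean up
    }
  }
}
```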
+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java

@@ -1162,7 +1162,7 @@ public final class S3ATestUtils {
    */
   public static void assumeStoreAwsHosted(final FileSystem fs) {
     assume("store is not AWS S3",
-        NetworkBinding.isAwsEndpoint(fs.getConf()
+        !NetworkBinding.isAwsEndpoint(fs.getConf()
             .getTrimmed(ENDPOINT, DEFAULT_ENDPOINT)));
   }
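For reference, the helper builds on the test framework's assumption mechanism: when the boolean passed to `assume()` is false, the calling test is skipped rather than failed. A minimal standalone sketch of those semantics, with a crude stand-in for `NetworkBinding.isAwsEndpoint()` rather than the real implementation:

```java
import static org.junit.jupiter.api.Assumptions.assumeTrue;

/** Standalone illustration; not the S3ATestUtils implementation. */
public final class AssumeExample {

  /** Crude stand-in: treat an empty (default) endpoint or one under
   *  amazonaws.com as AWS-hosted. */
  static boolean isAwsEndpoint(String endpoint) {
    return endpoint == null
        || endpoint.isEmpty()
        || endpoint.endsWith(".amazonaws.com");
  }

  /** With the negation restored by this revert, the assumption holds (and the
   *  calling test keeps running) only when the endpoint is NOT AWS-hosted;
   *  otherwise the test is skipped with the given message. */
  static void assumeStoreAwsHosted(String endpoint) {
    assumeTrue(!isAwsEndpoint(endpoint), "store is not AWS S3");
  }
}
```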
 

+ 0 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/tools/ITestBucketTool.java

@@ -157,7 +157,6 @@ public class ITestBucketTool extends AbstractS3ATestBase {
 
   @Test
   public void testS3ExpressBucketWithoutZoneParam() throws Throwable {
-    assumeStoreAwsHosted(getFileSystem());
     expectErrorCode(EXIT_USAGE,
         intercept(ExitUtil.ExitException.class, NO_ZONE_SUPPLIED, () ->
             bucketTool.exec("bucket", d(CREATE),