Browse Source

HADOOP-13446. Support running isolated unit tests separate from AWS integration tests. Contributed by Chris Nauroth.

Chris Nauroth 8 years ago
parent
commit
6f9c346e57
48 changed files with 323 additions and 124 deletions
  1. 5 0
      hadoop-project/pom.xml
  2. 75 20
      hadoop-tools/hadoop-aws/pom.xml
  3. 51 16
      hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
  4. 4 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractCreate.java
  5. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDelete.java
  6. 1 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java
  7. 5 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractGetFileStatus.java
  8. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractMkdir.java
  9. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractOpen.java
  10. 4 3
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRename.java
  11. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRootDir.java
  12. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractSeek.java
  13. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractCreate.java
  14. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractDelete.java
  15. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractMkdir.java
  16. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractOpen.java
  17. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractRename.java
  18. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractRootDir.java
  19. 4 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractSeek.java
  20. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestBlockingThreadPoolExecutorService.java
  21. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java
  22. 1 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java
  23. 5 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java
  24. 10 4
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
  25. 3 3
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACredentialsInURL.java
  26. 1 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java
  27. 2 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java
  28. 1 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionFastOutputStream.java
  29. 2 4
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java
  30. 1 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFastOutputStream.java
  31. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java
  32. 5 3
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
  33. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java
  34. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContext.java
  35. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextCreateMkdir.java
  36. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextMainOperations.java
  37. 4 3
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextStatistics.java
  38. 4 3
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextURI.java
  39. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextUtil.java
  40. 44 0
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java
  41. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java
  42. 2 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java
  43. 4 4
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java
  44. 5 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java
  45. 9 5
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java
  46. 5 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestInMemoryNativeS3FileSystemContract.java
  47. 7 3
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeFileSystemStore.java
  48. 5 2
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeS3FileSystemContract.java

+ 5 - 0
hadoop-project/pom.xml

@@ -1191,6 +1191,11 @@
          <artifactId>maven-surefire-plugin</artifactId>
          <version>${maven-surefire-plugin.version}</version>
        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-failsafe-plugin</artifactId>
+          <version>${maven-failsafe-plugin.version}</version>
+        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-install-plugin</artifactId>
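
Adding maven-failsafe-plugin under `<pluginManagement>` in hadoop-project only pins the plugin version for child modules; a module still has to declare the plugin and bind its goals itself. A rough sketch of what a consuming module's pom.xml might look like (illustrative only, mirroring the hadoop-aws wiring below; the `<version>` element is omitted because it is inherited from this pluginManagement entry):

    <!-- hypothetical child-module pom.xml snippet; version comes from pluginManagement -->
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-failsafe-plugin</artifactId>
      <executions>
        <execution>
          <goals>
            <goal>integration-test</goal>
            <goal>verify</goal>
          </goals>
        </execution>
      </executions>
    </plugin>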

+ 75 - 20
hadoop-tools/hadoop-aws/pom.xml

@@ -46,7 +46,7 @@
        </file>
      </activation>
      <properties>
-        <maven.test.skip>true</maven.test.skip>
+        <skipITs>true</skipITs>
      </properties>
    </profile>
    <profile>
@@ -57,11 +57,16 @@
        </file>
      </activation>
      <properties>
-        <maven.test.skip>false</maven.test.skip>
+        <skipITs>false</skipITs>
      </properties>
    </profile>
    <profile>
      <id>parallel-tests</id>
+      <activation>
+        <property>
+          <name>parallel-tests</name>
+        </property>
+      </activation>
      <build>
        <plugins>
          <plugin>
@@ -96,12 +101,32 @@
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-surefire-plugin</artifactId>
+            <configuration>
+              <forkCount>${testsThreadCount}</forkCount>
+              <reuseForks>false</reuseForks>
+              <argLine>${maven-surefire-plugin.argLine} -DminiClusterDedicatedDirs=true</argLine>
+              <systemPropertyVariables>
+                <test.build.data>${test.build.data}/${surefire.forkNumber}</test.build.data>
+                <test.build.dir>${test.build.dir}/${surefire.forkNumber}</test.build.dir>
+                <hadoop.tmp.dir>${hadoop.tmp.dir}/${surefire.forkNumber}</hadoop.tmp.dir>
+
+                <!-- Due to a Maven quirk, setting this to just -->
+                <!-- surefire.forkNumber won't do the parameter -->
+                <!-- substitution.  Putting a prefix in front of it like -->
+                <!-- "fork-" makes it work. -->
+                <test.unique.fork.id>fork-${surefire.forkNumber}</test.unique.fork.id>
+              </systemPropertyVariables>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
            <executions>
              <execution>
-                <id>default-test</id>
-                <phase>test</phase>
+                <id>default-integration-test</id>
                <goals>
-                  <goal>test</goal>
+                  <goal>integration-test</goal>
+                  <goal>verify</goal>
                </goals>
                <configuration>
                  <forkCount>${testsThreadCount}</forkCount>
@@ -129,32 +154,35 @@
                  <!-- Exclude all of these tests from parallel execution, -->
                  <!-- and instead run them sequentially in a separate -->
                  <!-- Surefire execution step later. -->
+                  <includes>
+                    <include>**/ITest*.java</include>
+                  </includes>
                  <excludes>
-                    <exclude>**/TestJets3tNativeS3FileSystemContract.java</exclude>
-                    <exclude>**/TestS3ABlockingThreadPool.java</exclude>
-                    <exclude>**/TestS3AFastOutputStream.java</exclude>
-                    <exclude>**/TestS3AFileSystemContract.java</exclude>
-                    <exclude>**/TestS3AMiniYarnCluster.java</exclude>
-                    <exclude>**/Test*Root*.java</exclude>
+                    <exclude>**/ITestJets3tNativeS3FileSystemContract.java</exclude>
+                    <exclude>**/ITestS3ABlockingThreadPool.java</exclude>
+                    <exclude>**/ITestS3AFastOutputStream.java</exclude>
+                    <exclude>**/ITestS3AFileSystemContract.java</exclude>
+                    <exclude>**/ITestS3AMiniYarnCluster.java</exclude>
+                    <exclude>**/ITest*Root*.java</exclude>
                  </excludes>
                </configuration>
              </execution>
              <execution>
-                <id>sequential-tests</id>
-                <phase>test</phase>
+                <id>sequential-integration-tests</id>
                <goals>
-                  <goal>test</goal>
+                  <goal>integration-test</goal>
+                  <goal>verify</goal>
                </goals>
                <configuration>
                  <!-- Do a sequential run for tests that cannot handle -->
                  <!-- parallel execution. -->
                  <includes>
-                    <include>**/TestJets3tNativeS3FileSystemContract.java</include>
-                    <include>**/TestS3ABlockingThreadPool.java</include>
-                    <include>**/TestS3AFastOutputStream.java</include>
-                    <include>**/TestS3AFileSystemContract.java</include>
-                    <include>**/TestS3AMiniYarnCluster.java</include>
-                    <include>**/Test*Root*.java</include>
+                    <include>**/ITestJets3tNativeS3FileSystemContract.java</include>
+                    <include>**/ITestS3ABlockingThreadPool.java</include>
+                    <include>**/ITestS3AFastOutputStream.java</include>
+                    <include>**/ITestS3AFileSystemContract.java</include>
+                    <include>**/ITestS3AMiniYarnCluster.java</include>
+                    <include>**/ITest*Root*.java</include>
                  </includes>
                </configuration>
              </execution>
@@ -163,6 +191,33 @@
        </plugins>
      </build>
    </profile>
+    <profile>
+      <id>sequential-tests</id>
+      <activation>
+        <property>
+          <name>!parallel-tests</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>integration-test</goal>
+                  <goal>verify</goal>
+                </goals>
+                <configuration>
+                  <forkedProcessTimeoutInSeconds>3600</forkedProcessTimeoutInSeconds>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
  </profiles>

  <build>
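
With the surefire/failsafe split above, the profiles keyed off auth-keys.xml now toggle failsafe's standard `skipITs` property instead of `maven.test.skip`, so unit tests always run while integration tests are opt-in. The same standard flags can also be passed by hand; a sketch, assuming stock Surefire/Failsafe behaviour rather than anything specific to this patch:

    # run unit tests, but skip every failsafe-run ITest* suite
    mvn clean verify -DskipITs

    # skip both unit and integration tests
    mvn clean verify -DskipTests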

+ 51 - 16
hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md

@@ -1319,11 +1319,18 @@ works with S3 to something lower. See [AWS documentation](http://docs.aws.amazon

 ## Testing the S3 filesystem clients

-Due to eventual consistency, tests may fail without reason. Transient
-failures, which no longer occur upon rerunning the test, should thus be ignored.
+This module includes both unit tests, which can run in isolation without
+connecting to the S3 service, and integration tests, which require a working
+connection to S3 to interact with a bucket.  Unit test suites follow the naming
+convention `Test*.java`.  Integration tests follow the naming convention
+`ITest*.java`.

-To test the S3* filesystem clients, you need to provide two files
-which pass in authentication details to the test runner
+Due to eventual consistency, integration tests may fail without reason.
+Transient failures, which no longer occur upon rerunning the test, should thus
+be ignored.
+
+To integration test the S3* filesystem clients, you need to provide two files
+which pass in authentication details to the test runner.

 1. `auth-keys.xml`
 1. `core-site.xml`
@@ -1343,7 +1350,8 @@ need to apply a specific, non-default property change during the tests.

 The presence of this file triggers the testing of the S3 classes.

-Without this file, *none of the tests in this module will be executed*
+Without this file, *none of the integration tests in this module will be
+executed*.

 The XML file must contain all the ID/key information needed to connect
 each of the filesystem clients to the object stores, and a URL for
@@ -1496,23 +1504,50 @@ source code tree, it is not going to get accidentally committed.

 After completing the configuration, execute the test run through Maven.

-    mvn clean test
+    mvn clean verify

-It's also possible to execute multiple test suites in parallel by enabling the
-`parallel-tests` Maven profile.  The tests spend most of their time blocked on
-network I/O with the S3 service, so running in parallel tends to complete full
-test runs faster.
+It's also possible to execute multiple test suites in parallel by passing the
+`parallel-tests` property on the command line.  The tests spend most of their
+time blocked on network I/O with the S3 service, so running in parallel tends to
+complete full test runs faster.

-    mvn -Pparallel-tests clean test
+    mvn -Dparallel-tests clean verify

 Some tests must run with exclusive access to the S3 bucket, so even with the
-`parallel-tests` profile enabled, several test suites will run in serial in a
-separate Maven execution step after the parallel tests.
+`parallel-tests` property, several test suites will run in serial in a separate
+Maven execution step after the parallel tests.
+
+By default, `parallel-tests` runs 4 test suites concurrently.  This can be tuned
+by passing the `testsThreadCount` property.
+
+    mvn -Dparallel-tests -DtestsThreadCount=8 clean verify
+
+To run just unit tests, which do not require S3 connectivity or AWS credentials,
+use any of the above invocations, but switch the goal to `test` instead of
+`verify`.
+
+    mvn clean test
+
+    mvn -Dparallel-tests clean test
+
+    mvn -Dparallel-tests -DtestsThreadCount=8 clean test
+
+To run only a specific named subset of tests, pass the `test` property for unit
+tests or the `it.test` property for integration tests.
+
+    mvn clean test -Dtest=TestS3AInputPolicies
+
+    mvn clean verify -Dit.test=ITestS3AFileContextStatistics

-By default, the `parallel-tests` profile runs 4 test suites concurrently.  This
-can be tuned by passing the `testsThreadCount` argument.
+    mvn clean verify -Dtest=TestS3A* -Dit.test=ITestS3A*

-    mvn -Pparallel-tests -DtestsThreadCount=8 clean test
+Note that when running a specific subset of tests, the patterns passed in `test`
+and `it.test` override the configuration of which tests need to run in isolation
+in a separate serial phase (mentioned above).  This can cause unpredictable
+results, so the recommendation is to avoid passing `parallel-tests` in
+combination with `test` or `it.test`.  If you know that you are specifying only
+tests that can run safely in parallel, then it will work.  For wide patterns,
+like `ITestS3A*` shown above, it may cause unpredictable test failures.

 ### Testing against different regions

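For reference, the `auth-keys.xml` file described above is an ordinary Hadoop configuration file, typically dropped into the module's `src/test/resources` directory. A minimal sketch covering only the S3A client is shown below; the bucket name and credentials are placeholders, and the corresponding s3/s3n properties are omitted:

    <configuration>
      <!-- placeholder values: substitute your own test bucket and credentials -->
      <property>
        <name>test.fs.s3a.name</name>
        <value>s3a://your-test-bucket/</value>
      </property>
      <property>
        <name>fs.s3a.access.key</name>
        <value>YOUR-ACCESS-KEY-ID</value>
      </property>
      <property>
        <name>fs.s3a.secret.key</name>
        <value>YOUR-SECRET-KEY</value>
      </property>
    </configuration>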

+ 4 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractCreate.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractCreate.java

@@ -21,9 +21,11 @@ package org.apache.hadoop.fs.contract.s3a;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.ContractTestUtils;

-public class TestS3AContractCreate extends AbstractContractCreateTest {
+/**
+ * S3A contract tests creating files.
+ */
+public class ITestS3AContractCreate extends AbstractContractCreateTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractDelete.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDelete.java

@@ -22,7 +22,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

-public class TestS3AContractDelete extends AbstractContractDeleteTest {
+/**
+ * S3A contract tests covering deletes.
+ */
+public class ITestS3AContractDelete extends AbstractContractDeleteTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractDistCp.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java

@@ -27,7 +27,7 @@ import org.apache.hadoop.tools.contract.AbstractContractDistCpTest;
 /**
  * Contract test suite covering S3A integration with DistCp.
  */
-public class TestS3AContractDistCp extends AbstractContractDistCpTest {
+public class ITestS3AContractDistCp extends AbstractContractDistCpTest {

   private static final long MULTIPART_SETTING = 8 * 1024 * 1024; // 8 MB


+ 5 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractGetFileStatus.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractGetFileStatus.java

@@ -23,7 +23,11 @@ import org.apache.hadoop.fs.contract.AbstractContractGetFileStatusTest;
 import org.apache.hadoop.fs.s3a.Constants;
 import org.apache.hadoop.fs.s3a.S3ATestUtils;

-public class TestS3AContractGetFileStatus extends AbstractContractGetFileStatusTest {
+/**
+ * S3A contract tests covering getFileStatus.
+ */
+public class ITestS3AContractGetFileStatus
+    extends AbstractContractGetFileStatusTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractMkdir.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractMkdir.java

@@ -23,9 +23,9 @@ import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

 /**
- * Test dir operations on S3
+ * Test dir operations on S3A.
  */
-public class TestS3AContractMkdir extends AbstractContractMkdirTest {
+public class ITestS3AContractMkdir extends AbstractContractMkdirTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractOpen.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractOpen.java

@@ -22,7 +22,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

-public class TestS3AContractOpen extends AbstractContractOpenTest {
+/**
+ * S3A contract tests opening files.
+ */
+public class ITestS3AContractOpen extends AbstractContractOpenTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractRename.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRename.java

@@ -23,13 +23,14 @@ import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Test;

 import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;

-public class TestS3AContractRename extends AbstractContractRenameTest {
+/**
+ * S3A contract tests covering rename.
+ */
+public class ITestS3AContractRename extends AbstractContractRenameTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractRootDir.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRootDir.java

@@ -31,11 +31,11 @@ import org.slf4j.LoggerFactory;
 /**
  * root dir operations against an S3 bucket.
  */
-public class TestS3AContractRootDir extends
+public class ITestS3AContractRootDir extends
     AbstractContractRootDirectoryTest {

   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3AContractRootDir.class);
+      LoggerFactory.getLogger(ITestS3AContractRootDir.class);

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractSeek.java

@@ -22,7 +22,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

-public class TestS3AContractSeek extends AbstractContractSeekTest {
+/**
+ * S3A contract tests covering file seek.
+ */
+public class ITestS3AContractSeek extends AbstractContractSeekTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/TestS3NContractCreate.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractCreate.java

@@ -23,7 +23,10 @@ import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;
 import org.apache.hadoop.fs.contract.ContractTestUtils;

-public class TestS3NContractCreate extends AbstractContractCreateTest {
+/**
+ * S3N contract tests creating files.
+ */
+public class ITestS3NContractCreate extends AbstractContractCreateTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/TestS3NContractDelete.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractDelete.java

@@ -22,7 +22,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

-public class TestS3NContractDelete extends AbstractContractDeleteTest {
+/**
+ * S3A contract tests covering deletes.
+ */
+public class ITestS3NContractDelete extends AbstractContractDeleteTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/TestS3NContractMkdir.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractMkdir.java

@@ -23,9 +23,9 @@ import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

 /**
- * Test dir operations on S3
+ * Test dir operations on S3.
  */
-public class TestS3NContractMkdir extends AbstractContractMkdirTest {
+public class ITestS3NContractMkdir extends AbstractContractMkdirTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/TestS3NContractOpen.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractOpen.java

@@ -22,7 +22,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

-public class TestS3NContractOpen extends AbstractContractOpenTest {
+/**
+ * S3N contract tests opening files.
+ */
+public class ITestS3NContractOpen extends AbstractContractOpenTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/TestS3NContractRename.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractRename.java

@@ -22,7 +22,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

-public class TestS3NContractRename extends AbstractContractRenameTest {
+/**
+ * S3N contract tests covering rename.
+ */
+public class ITestS3NContractRename extends AbstractContractRenameTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/TestS3NContractRootDir.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractRootDir.java

@@ -23,9 +23,9 @@ import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

 /**
- * root dir operations against an S3 bucket
+ * Root dir operations against an S3 bucket.
  */
-public class TestS3NContractRootDir extends
+public class ITestS3NContractRootDir extends
     AbstractContractRootDirectoryTest {

   @Override

+ 4 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/TestS3NContractSeek.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3n/ITestS3NContractSeek.java

@@ -22,7 +22,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
 import org.apache.hadoop.fs.contract.AbstractFSContract;

-public class TestS3NContractSeek extends AbstractContractSeekTest {
+/**
+ * S3N contract tests covering file seek.
+ */
+public class ITestS3NContractSeek extends AbstractContractSeekTest {

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestBlockingThreadPoolExecutorService.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestBlockingThreadPoolExecutorService.java

@@ -33,7 +33,7 @@ import static org.junit.Assert.assertFalse;
 /**
  * Basic unit test for S3A's blocking executor service.
  */
-public class TestBlockingThreadPoolExecutorService {
+public class ITestBlockingThreadPoolExecutorService {

   private static final Logger LOG = LoggerFactory.getLogger(
       BlockingThreadPoolExecutorService.class);
@@ -179,4 +179,4 @@ public class TestBlockingThreadPoolExecutorService {
     }
     tpe = null;
   }
-}
+}

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java

@@ -48,9 +48,9 @@ import static org.junit.Assert.*;
  * Tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic.
  *
  */
-public class TestS3AAWSCredentialsProvider {
+public class ITestS3AAWSCredentialsProvider {
   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3AAWSCredentialsProvider.class);
+      LoggerFactory.getLogger(ITestS3AAWSCredentialsProvider.class);

   @Rule
   public Timeout testTimeout = new Timeout(1 * 60 * 1000);

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ABlockingThreadPool.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java

@@ -34,7 +34,7 @@ import org.junit.rules.Timeout;
  * 4th part should not trigger an exception as it would with a
  * non-blocking threadpool.
  */
-public class TestS3ABlockingThreadPool {
+public class ITestS3ABlockingThreadPool {

   private Configuration conf;
   private S3AFileSystem fs;

+ 5 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ABlocksize.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java

@@ -35,10 +35,13 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.fileStatsToString;

-public class TestS3ABlocksize extends AbstractFSContractTestBase {
+/**
+ * S3A tests for configuring block size.
+ */
+public class ITestS3ABlocksize extends AbstractFSContractTestBase {

   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3ABlocksize.class);
+      LoggerFactory.getLogger(ITestS3ABlocksize.class);

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 10 - 4
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AConfiguration.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java

@@ -50,7 +50,10 @@ import org.apache.hadoop.util.VersionInfo;
 import org.apache.http.HttpStatus;
 import org.junit.rules.TemporaryFolder;

-public class TestS3AConfiguration {
+/**
+ * S3A tests for configuration.
+ */
+public class ITestS3AConfiguration {
   private static final String EXAMPLE_ID = "AKASOMEACCESSKEY";
   private static final String EXAMPLE_KEY =
       "RGV0cm9pdCBSZ/WQgY2xl/YW5lZCB1cAEXAMPLE";
@@ -59,7 +62,7 @@ public class TestS3AConfiguration {
   private S3AFileSystem fs;

   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3AConfiguration.class);
+      LoggerFactory.getLogger(ITestS3AConfiguration.class);

   private static final String TEST_ENDPOINT = "test.fs.s3a.endpoint";

@@ -351,7 +354,8 @@ public class TestS3AConfiguration {
   }

   @Test
-  public void shouldBeAbleToSwitchOnS3PathStyleAccessViaConfigProperty() throws Exception {
+  public void shouldBeAbleToSwitchOnS3PathStyleAccessViaConfigProperty()
+      throws Exception {

     conf = new Configuration();
     conf.set(Constants.PATH_STYLE_ACCESS, Boolean.toString(true));
@@ -367,7 +371,9 @@ public class TestS3AConfiguration {
       assertTrue("Expected to find path style access to be switched on!",
           clientOptions.isPathStyleAccess());
       byte[] file = ContractTestUtils.toAsciiByteArray("test file");
-      ContractTestUtils.writeAndRead(fs, new Path("/path/style/access/testFile"), file, file.length, conf.getInt(Constants.FS_S3A_BLOCK_SIZE, file.length), false, true);
+      ContractTestUtils.writeAndRead(fs,
+          new Path("/path/style/access/testFile"), file, file.length,
+          conf.getInt(Constants.FS_S3A_BLOCK_SIZE, file.length), false, true);
     } catch (final AWSS3IOException e) {
       LOG.error("Caught exception: ", e);
       // Catch/pass standard path style access behaviour when live bucket

+ 3 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ACredentialsInURL.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACredentialsInURL.java

@@ -43,10 +43,10 @@ import static org.apache.hadoop.fs.s3a.S3ATestConstants.TEST_FS_S3A_NAME;
  * set, and a check that an invalid set do at least get stripped out
  * of the final URI
  */
-public class TestS3ACredentialsInURL extends Assert {
+public class ITestS3ACredentialsInURL extends Assert {
   private S3AFileSystem fs;
   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3ACredentialsInURL.class);
+      LoggerFactory.getLogger(ITestS3ACredentialsInURL.class);
   @Rule
   public Timeout testTimeout = new Timeout(30 * 60 * 1000);

@@ -109,7 +109,7 @@ public class TestS3ACredentialsInURL extends Assert {
       fail(text + " Test failed");
     }
   }
-  
+
   /**
    * Set up some invalid credentials, verify login is rejected.
    * @throws Throwable

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AEncryption.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java

@@ -35,7 +35,7 @@ import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
  * are made for different file sizes as there have been reports that the
  * file length may be rounded up to match word boundaries.
  */
-public class TestS3AEncryption extends AbstractS3ATestBase {
+public class ITestS3AEncryption extends AbstractS3ATestBase {
   private static final String AES256 = Constants.SERVER_SIDE_ENCRYPTION_AES256;

   @Override

+ 2 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AEncryptionAlgorithmPropagation.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java

@@ -31,7 +31,8 @@ import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
  * Test whether or not encryption settings propagate by choosing an invalid
  * one. We expect the write to fail with a 400 bad request error
  */
-public class TestS3AEncryptionAlgorithmPropagation extends AbstractS3ATestBase {
+public class ITestS3AEncryptionAlgorithmPropagation
+    extends AbstractS3ATestBase {

   @Override
   protected Configuration createConfiguration() {

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AEncryptionFastOutputStream.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionFastOutputStream.java

@@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration;
  * Run the encryption tests against the Fast output stream.
  * This verifies that both file writing paths can encrypt their data.
  */
-public class TestS3AEncryptionFastOutputStream extends TestS3AEncryption {
+public class ITestS3AEncryptionFastOutputStream extends ITestS3AEncryption {

   @Override
   protected Configuration createConfiguration() {

+ 2 - 4
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AFailureHandling.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java

@@ -24,7 +24,6 @@ import com.amazonaws.services.s3.model.AmazonS3Exception;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.InvalidRequestException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.contract.AbstractFSContract;
 import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
@@ -37,7 +36,6 @@ import java.io.EOFException;
 import java.io.FileNotFoundException;
 import java.nio.file.AccessDeniedException;
 import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;

 import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
@@ -47,9 +45,9 @@ import static org.apache.hadoop.fs.s3a.S3AUtils.*;
  * Test S3A Failure translation, including a functional test
  * generating errors during stream IO.
  */
-public class TestS3AFailureHandling extends AbstractFSContractTestBase {
+public class ITestS3AFailureHandling extends AbstractFSContractTestBase {
   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3AFailureHandling.class);
+      LoggerFactory.getLogger(ITestS3AFailureHandling.class);

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AFastOutputStream.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFastOutputStream.java

@@ -34,7 +34,7 @@ import java.io.IOException;
  * Tests regular and multi-part upload functionality for S3AFastOutputStream.
  * File sizes are kept small to reduce test duration on slow connections
  */
-public class TestS3AFastOutputStream {
+public class ITestS3AFastOutputStream {
   private FileSystem fs;



+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AFileOperationCost.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java

@@ -43,13 +43,13 @@ import static org.apache.hadoop.test.GenericTestUtils.getTestDir;
  * Use metrics to assert about the cost of file status queries.
  * {@link S3AFileSystem#getFileStatus(Path)}.
  */
-public class TestS3AFileOperationCost extends AbstractFSContractTestBase {
+public class ITestS3AFileOperationCost extends AbstractFSContractTestBase {

   private MetricDiff metadataRequests;
   private MetricDiff listRequests;

   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3AFileOperationCost.class);
+      LoggerFactory.getLogger(ITestS3AFileOperationCost.class);

   @Override
   protected AbstractFSContract createContract(Configuration conf) {

+ 5 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AFileSystemContract.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java

@@ -33,10 +33,10 @@ import org.apache.hadoop.fs.Path;
  *  properly making it impossible to skip the tests if we don't have a valid
  *  bucket.
  **/
-public class TestS3AFileSystemContract extends FileSystemContractBaseTest {
+public class ITestS3AFileSystemContract extends FileSystemContractBaseTest {

   protected static final Logger LOG =
-      LoggerFactory.getLogger(TestS3AFileSystemContract.class);
+      LoggerFactory.getLogger(ITestS3AFileSystemContract.class);

   @Override
   public void setUp() throws Exception {
@@ -61,7 +61,9 @@ public class TestS3AFileSystemContract extends FileSystemContractBaseTest {

   @Override
   public void testRenameFileAsExistingFile() throws Exception {
-    if (!renameSupported()) return;
+    if (!renameSupported()) {
+      return;
+    }

     Path src = path("/test/hadoop/file");
     createFile(src);

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ATemporaryCredentials.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java

@@ -48,12 +48,12 @@ import static org.apache.hadoop.fs.s3a.Constants.*;
  * should only be used against transient filesystems where you don't care about
  * the data.
  */
-public class TestS3ATemporaryCredentials extends AbstractFSContractTestBase {
+public class ITestS3ATemporaryCredentials extends AbstractFSContractTestBase {
   public static final String TEST_STS_ENABLED = "test.fs.s3a.sts.enabled";
   public static final String TEST_STS_ENDPOINT = "test.fs.s3a.sts.endpoint";

   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3ATemporaryCredentials.class);
+      LoggerFactory.getLogger(ITestS3ATemporaryCredentials.class);

   private static final String PROVIDER_CLASS
       = TemporaryAWSCredentialsProvider.NAME;

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/TestS3AFileContext.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContext.java

@@ -16,8 +16,8 @@ package org.apache.hadoop.fs.s3a.fileContext;
 import org.apache.hadoop.fs.TestFileContext;

 /**
- * Implementation of TestFileContext for S3a
+ * Implementation of TestFileContext for S3a.
  */
-public class TestS3AFileContext extends TestFileContext{
+public class ITestS3AFileContext extends TestFileContext{

 }

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/TestS3AFileContextCreateMkdir.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextCreateMkdir.java

@@ -20,9 +20,9 @@ import org.apache.hadoop.fs.s3a.S3ATestUtils;
 import org.junit.Before;

 /**
- * Extends FileContextCreateMkdirBaseTest for a S3a FileContext
+ * Extends FileContextCreateMkdirBaseTest for a S3a FileContext.
  */
-public class TestS3AFileContextCreateMkdir
+public class ITestS3AFileContextCreateMkdir
         extends FileContextCreateMkdirBaseTest {

   @Before

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/TestS3AFileContextMainOperations.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextMainOperations.java

@@ -22,9 +22,9 @@ import org.junit.Ignore;
 import org.junit.Test;

 /**
- * S3A implementation of FileContextMainOperationsBaseTest
+ * S3A implementation of FileContextMainOperationsBaseTest.
  */
-public class TestS3AFileContextMainOperations
+public class ITestS3AFileContextMainOperations
         extends FileContextMainOperationsBaseTest {

   @Before

+ 4 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/TestS3AFileContextStatistics.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextStatistics.java

@@ -24,15 +24,16 @@ import org.junit.Assert;
 import org.junit.Before;

 /**
- * S3a implementation of FCStatisticsBaseTest
+ * S3a implementation of FCStatisticsBaseTest.
  */
-public class TestS3AFileContextStatistics extends FCStatisticsBaseTest {
+public class ITestS3AFileContextStatistics extends FCStatisticsBaseTest {

   @Before
   public void setUp() throws Exception {
     Configuration conf = new Configuration();
     fc = S3ATestUtils.createTestFileContext(conf);
-    fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "test"), FileContext.DEFAULT_PERM, true);
+    fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "test"),
+        FileContext.DEFAULT_PERM, true);
   }

   @After

+ 4 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/TestS3AFileContextURI.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextURI.java

@@ -22,9 +22,9 @@ import org.junit.Ignore;
 import org.junit.Test;

 /**
- * S3a implementation of FileContextURIBase
+ * S3a implementation of FileContextURIBase.
  */
-public class TestS3AFileContextURI extends FileContextURIBase {
+public class ITestS3AFileContextURI extends FileContextURIBase {

   @Before
   public void setUp() throws IOException, Exception {
@@ -37,7 +37,8 @@ public class TestS3AFileContextURI extends FileContextURIBase {
   @Test
   @Ignore
   public void testFileStatus() throws IOException {
-    //test disabled (the statistics tested with this method are not relevant for an S3FS)
+    // test disabled
+    // (the statistics tested with this method are not relevant for an S3FS)
   }

 }

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/TestS3AFileContextUtil.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextUtil.java

@@ -20,9 +20,9 @@ import org.apache.hadoop.fs.s3a.S3ATestUtils;
 import org.junit.Before;

 /**
- * S3A implementation of FileContextUtilBase
+ * S3A implementation of FileContextUtilBase.
  */
-public class TestS3AFileContextUtil extends FileContextUtilBase {
+public class ITestS3AFileContextUtil extends FileContextUtilBase {

   @Before
   public void setUp() throws IOException, Exception {

+ 44 - 0
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java

@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.scale;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3a.Constants;
+import org.junit.Test;
+
+import java.io.IOException;
+
+/**
+ * Tests file deletion with multi-delete disabled.
+ */
+public class ITestS3ADeleteFilesOneByOne extends ITestS3ADeleteManyFiles {
+
+  @Override
+  protected Configuration createConfiguration() {
+    Configuration configuration = super.createConfiguration();
+    configuration.setBoolean(Constants.ENABLE_MULTI_DELETE, false);
+    return configuration;
+  }
+
+  @Override
+  @Test
+  public void testOpenCreate() throws IOException {
+
+  }
+}
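
The new test above shows the pattern the S3A test suite uses to vary one client feature per test class: override createConfiguration() and flip a single key from Constants. For readers who want the same behaviour outside the test suite, here is a minimal client-side sketch; the bucket URI and path are hypothetical, and it assumes hadoop-aws and its AWS SDK dependencies are on the classpath.

// Minimal sketch (hypothetical bucket and path): build an S3A client with
// multi-object delete disabled, mirroring the override in the test above.
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.Constants;

public class MultiDeleteDisabledExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Same switch the test flips; directory deletes then issue one request per key.
    conf.setBoolean(Constants.ENABLE_MULTI_DELETE, false);
    // newInstance avoids the FileSystem cache, so closing this client is safe.
    try (FileSystem fs = FileSystem.newInstance(
        URI.create("s3a://example-bucket/"), conf)) {
      fs.delete(new Path("s3a://example-bucket/tmp/example"), true);
    }
  }
}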

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/TestS3ADeleteManyFiles.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java

@@ -35,9 +35,9 @@ import java.util.concurrent.Future;
 /**
  * Test some scalable operations related to file renaming and deletion.
  */
-public class TestS3ADeleteManyFiles extends S3AScaleTestBase {
+public class ITestS3ADeleteManyFiles extends S3AScaleTestBase {
   private static final Logger LOG =
-      LoggerFactory.getLogger(TestS3ADeleteManyFiles.class);
+      LoggerFactory.getLogger(ITestS3ADeleteManyFiles.class);

   /**
    * CAUTION: If this test starts failing, please make sure that the

+ 2 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/TestS3ADirectoryPerformance.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java

@@ -33,9 +33,9 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
 /**
  * Test the performance of listing files/directories.
  */
-public class TestS3ADirectoryPerformance extends S3AScaleTestBase {
+public class ITestS3ADirectoryPerformance extends S3AScaleTestBase {
   private static final Logger LOG = LoggerFactory.getLogger(
-      TestS3ADirectoryPerformance.class);
+      ITestS3ADirectoryPerformance.class);

   @Test
   public void testListOperations() throws Throwable {

+ 4 - 4
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/TestS3AInputStreamPerformance.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java

@@ -50,9 +50,9 @@ import static org.apache.hadoop.fs.s3a.Constants.*;
 /**
  * Look at the performance of S3a operations.
  */
-public class TestS3AInputStreamPerformance extends S3AScaleTestBase {
+public class ITestS3AInputStreamPerformance extends S3AScaleTestBase {
   private static final Logger LOG = LoggerFactory.getLogger(
-      TestS3AInputStreamPerformance.class);
+      ITestS3AInputStreamPerformance.class);

   private S3AFileSystem s3aFS;
   private Path testData;
@@ -223,7 +223,7 @@ public class TestS3AInputStreamPerformance extends S3AScaleTestBase {
       int reads = 0;
       while (remaining > 0) {
         int bytesRead = in.read(block, offset, remaining);
-        reads ++;
+        reads++;
         if (bytesRead == 1) {
           break;
         }
@@ -233,7 +233,7 @@ public class TestS3AInputStreamPerformance extends S3AScaleTestBase {
       }
       blockTimer.end("Reading block %d in %d reads", i, reads);
     }
-    timer2.end("Time to read %d bytes in %d blocks", len, blockCount );
+    timer2.end("Time to read %d bytes in %d blocks", len, blockCount);
     bandwidth(timer2, count);
     logStreamStatistics();
   }

+ 5 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3A.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java

@@ -36,7 +36,10 @@ import org.apache.hadoop.fs.s3a.S3ATestUtils;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;

-public class TestS3A {
+/**
+ * S3A tests through the {@link FileContext} API.
+ */
+public class ITestS3A {
   private FileContext fc;

   @Rule
@@ -75,7 +78,7 @@
   @Test
   public void testS3ACreateFileInSubDir() throws Exception {
     Path dirPath = getTestPath();
-    fc.mkdir(dirPath,FileContext.DIR_DEFAULT_PERM,true);
+    fc.mkdir(dirPath, FileContext.DIR_DEFAULT_PERM, true);
     Path filePath = new Path(dirPath, "file");
     try (FSDataOutputStream file = fc.create(filePath, EnumSet.of(CreateFlag
         .CREATE))) {

+ 9 - 5
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3AMiniYarnCluster.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java

@@ -45,7 +45,10 @@ import org.junit.Before;
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;

-public class TestS3AMiniYarnCluster {
+/**
+ * Tests that S3A is usable through a YARN application.
+ */
+public class ITestS3AMiniYarnCluster {

   private final Configuration conf = new YarnConfiguration();
   private S3AFileSystem fs;
@@ -105,9 +108,10 @@ public class TestS3AMiniYarnCluster {
   }

   /**
-   * helper method
+   * helper method.
    */
-  private Map<String, Integer> getResultAsMap(String outputAsStr) throws IOException {
+  private Map<String, Integer> getResultAsMap(String outputAsStr)
+      throws IOException {
     Map<String, Integer> result = new HashMap<>();
     for (String line : outputAsStr.split("\n")) {
       String[] tokens = line.split("\t");
@@ -117,7 +121,7 @@ public class TestS3AMiniYarnCluster {
   }

   /**
-   * helper method
+   * helper method.
    */
   private void writeStringToFile(Path path, String string) throws IOException {
     FileContext fc = S3ATestUtils.createTestFileContext(conf);
@@ -128,7 +132,7 @@ public class TestS3AMiniYarnCluster {
   }

   /**
-   * helper method
+   * helper method.
    */
   private String readStringFromFile(Path path) {
     try (FSDataInputStream in = fs.open(path)) {

+ 5 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestInMemoryNativeS3FileSystemContract.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestInMemoryNativeS3FileSystemContract.java

@@ -20,8 +20,11 @@ package org.apache.hadoop.fs.s3native;

 import java.io.IOException;

-public class TestInMemoryNativeS3FileSystemContract
-  extends NativeS3FileSystemContractBaseTest {
+/**
+ * S3N basic contract tests through mock in-memory S3 implementation.
+ */
+public class ITestInMemoryNativeS3FileSystemContract
+    extends NativeS3FileSystemContractBaseTest {

   @Override
   NativeFileSystemStore getNativeFileSystemStore() throws IOException {

+ 7 - 3
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeFileSystemStore.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeFileSystemStore.java

@@ -40,8 +40,10 @@ import java.security.DigestOutputStream;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;

-
-public class TestJets3tNativeFileSystemStore {
+/**
+ * S3N tests through live S3 service.
+ */
+public class ITestJets3tNativeFileSystemStore {
   private Configuration conf;
   private Jets3tNativeFileSystemStore store;
   private NativeS3FileSystem fs;
@@ -98,7 +100,9 @@ public class TestJets3tNativeFileSystemStore {
     InputStream in = new BufferedInputStream(
         new DigestInputStream(fs.open(copyPath), digest2));
     long copyLen = 0;
-    while (in.read() != -1) {copyLen++;}
+    while (in.read() != -1) {
+      copyLen++;
+    }
     in.close();

     assertEquals("Copy length matches original", len, copyLen);

+ 5 - 2
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeS3FileSystemContract.java → hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeS3FileSystemContract.java

@@ -20,8 +20,11 @@ package org.apache.hadoop.fs.s3native;

 import java.io.IOException;

-public class TestJets3tNativeS3FileSystemContract
-  extends NativeS3FileSystemContractBaseTest {
+/**
+ * S3N basic contract tests through live S3 service.
+ */
+public class ITestJets3tNativeS3FileSystemContract
+    extends NativeS3FileSystemContractBaseTest {

   @Override
   NativeFileSystemStore getNativeFileSystemStore() throws IOException {