Browse source

HADOOP-16205 Backport ABFS driver from trunk to branch 2.0: Fix build and test failures.

Contributed by Yuan Gao.
Yuan Gao 6 years ago
commit 18f66a05cf
15 changed files with 243 additions and 105 deletions
  1. +19 -3   hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractConcatTest.java
  2. +19 -3   hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractGetFileStatusTest.java
  3. +6 -6    hadoop-tools/hadoop-azure/pom.xml
  4. +1 -2    hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
  5. +6 -1    hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsOutputStream.java
  6. +4 -3    hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java
  7. +8 -2    hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
  8. +1 -1    hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java
  9. +106 -54 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAuthorization.java
  10. +15 -4  hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java
  11. +22 -6  hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemE2E.java
  12. +10 -4  hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemListStatus.java
  13. +15 -8  hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestGetNameSpaceEnabled.java
  14. +3 -3   hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
  15. +8 -5   hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockAbfsAuthorizer.java

+ 19 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractConcatTest.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.contract;
 
 import org.apache.hadoop.fs.Path;
 
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -62,14 +63,24 @@ public abstract class AbstractContractConcatTest extends AbstractFSContractTestB
   public void testConcatEmptyFiles() throws Throwable {
     touch(getFileSystem(), target);
     handleExpectedException(intercept(Exception.class,
-        () -> getFileSystem().concat(target, new Path[0])));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            getFileSystem().concat(target, new Path[0]);
+          }
+        }));
   }
 
   @Test
   public void testConcatMissingTarget() throws Throwable {
     handleExpectedException(
         intercept(Exception.class,
-            () -> getFileSystem().concat(target, new Path[]{zeroByteFile})));
+            new LambdaTestUtils.VoidCallable() {
+              @Override
+              public void call() throws Exception {
+                getFileSystem().concat(target, new Path[]{zeroByteFile});
+              }
+        }));
   }
 
   @Test
@@ -90,7 +101,12 @@ public abstract class AbstractContractConcatTest extends AbstractFSContractTestB
     byte[] block = dataset(TEST_FILE_LEN, 0, 255);
     createFile(getFileSystem(), target, false, block);
     handleExpectedException(intercept(Exception.class,
-        () -> getFileSystem().concat(target, new Path[]{target})));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            getFileSystem().concat(target, new Path[]{target});
+          }
+        }));
   }
 
 }
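The hunk above is representative of the bulk of this commit: branch-2 builds against Java 7, so trunk's lambda arguments to `LambdaTestUtils.intercept` are expanded into anonymous `VoidCallable` implementations. A minimal, self-contained sketch of the same rewrite follows; the class name and the thrown `FileNotFoundException` are purely illustrative, not taken from the patch.

```java
import java.io.FileNotFoundException;

import org.apache.hadoop.test.LambdaTestUtils;

import static org.apache.hadoop.test.LambdaTestUtils.intercept;

public class VoidCallableBackportSketch {
  public static void main(String[] args) throws Exception {
    // Trunk (Java 8) form:
    //   intercept(FileNotFoundException.class,
    //       () -> { throw new FileNotFoundException("missing"); });
    // Branch-2 (Java 7-compatible) form used throughout this patch:
    intercept(FileNotFoundException.class,
        new LambdaTestUtils.VoidCallable() {
          @Override
          public void call() throws Exception {
            throw new FileNotFoundException("missing");
          }
        });
    System.out.println("intercept caught the expected exception");
  }
}
```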

+ 19 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractGetFileStatusTest.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Test;
 
 import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
@@ -277,21 +278,36 @@ public abstract class AbstractContractGetFileStatusTest extends
   public void testLocatedStatusNoDir() throws Throwable {
     describe("test the LocatedStatus call on a path which is not present");
     intercept(FileNotFoundException.class,
-        () -> getFileSystem().listLocatedStatus(path("missing")));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            getFileSystem().listLocatedStatus(path("missing"));
+          }
+        });
   }
 
   @Test
   public void testListStatusNoDir() throws Throwable {
     describe("test the listStatus(path) call on a path which is not present");
     intercept(FileNotFoundException.class,
-        () -> getFileSystem().listStatus(path("missing")));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            getFileSystem().listStatus(path("missing"));
+          }
+        });
   }
 
   @Test
   public void testListStatusFilteredNoDir() throws Throwable {
     describe("test the listStatus(path, filter) call on a missing path");
     intercept(FileNotFoundException.class,
-        () -> getFileSystem().listStatus(path("missing"), ALL_PATHS));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            getFileSystem().listStatus(path("missing"), ALL_PATHS);
+          }
+        });
   }
 
   @Test

+ 6 - 6
hadoop-tools/hadoop-azure/pom.xml

@@ -159,13 +159,13 @@
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
     </dependency>
-
+<!--
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-util-ajax</artifactId>
       <scope>compile</scope>
     </dependency>
-
+-->
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
@@ -177,13 +177,13 @@
       <artifactId>jackson-core-asl</artifactId>
       <scope>compile</scope>
     </dependency>
-
+<!--
     <dependency>
       <groupId>org.wildfly.openssl</groupId>
       <artifactId>wildfly-openssl</artifactId>
       <scope>compile</scope>
     </dependency>
-
+-->
     <!--com.fasterxml.jackson is used by WASB, not ABFS-->
     <!--transitive dependency from Azure SDK-->
     <dependency>
@@ -237,13 +237,13 @@
       <artifactId>log4j</artifactId>
       <scope>test</scope>
     </dependency>
-
+<!--
     <dependency>
       <groupId>javax.ws.rs</groupId>
       <artifactId>jsr311-api</artifactId>
       <scope>test</scope>
     </dependency>
-
+-->
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
+ 1 - 2
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java

@@ -973,8 +973,7 @@ public class AzureBlobFileSystemStore {
               owner,
               group,
               null,
-              path,
-              hasAcl, false, false);
+              path);
 
       this.version = version;
     }
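The shorter call above matches the `FileStatus` constructor available on branch-2, which, as far as I can tell, predates the variant taking `hasAcl`/`isEncrypted`/`isErasureCoded` flags (an assumption; the patch itself only shows the argument list shrinking). A sketch of that eleven-argument constructor with placeholder values:

```java
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class FileStatusConstructorSketch {
  public static void main(String[] args) {
    // length, isdir, replication, blocksize, mtime, atime,
    // permission, owner, group, symlink, path -- all placeholder values.
    FileStatus status = new FileStatus(
        0L, false, 1, 4 * 1024 * 1024, System.currentTimeMillis(), 0L,
        new FsPermission((short) 0644), "owner", "group", null,
        new Path("/example"));
    System.out.println(status.getPath());
  }
}
```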

+ 6 - 1
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsOutputStream.java

@@ -89,7 +89,7 @@ public class AbfsOutputStream extends OutputStream implements Syncable, StreamCa
         maxConcurrentRequestCount,
         10L,
         TimeUnit.SECONDS,
-        new LinkedBlockingQueue<>());
+        new LinkedBlockingQueue<Runnable>());
     this.completionService = new ExecutorCompletionService<>(this.threadExecutor);
   }
 
@@ -244,6 +244,11 @@ public class AbfsOutputStream extends OutputStream implements Syncable, StreamCa
     }
   }
 
+  @Override
+  public void sync() throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
   private synchronized void flushInternal(boolean isClose) throws IOException {
     maybeThrowLastError();
     writeCurrentBufferToService();
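Two smaller changes above also trace back to the branch-2 target: the executor queue gets an explicit `<Runnable>` type argument instead of the diamond operator, and a `sync()` override is added, presumably because the `Syncable` interface on branch-2 still declares that deprecated method (both readings are assumptions, not stated in the patch). A standalone sketch of the explicit queue typing, with made-up pool sizes:

```java
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ExplicitQueueTypeSketch {
  public static void main(String[] args) throws Exception {
    // Explicit <Runnable> rather than new LinkedBlockingQueue<>(), mirroring the
    // AbfsOutputStream change; core/max pool sizes here are illustrative only.
    ThreadPoolExecutor pool = new ThreadPoolExecutor(
        1, 4, 10L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
    pool.execute(new Runnable() {
      @Override
      public void run() {
        System.out.println("task ran");
      }
    });
    pool.shutdown();
    pool.awaitTermination(10, TimeUnit.SECONDS);
  }
}
```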

+ 4 - 3
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java

@@ -32,8 +32,9 @@ import javax.net.ssl.SSLSocketFactory;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+/*
 import org.wildfly.openssl.OpenSSLProvider;
-
+*/
 
 /**
  * Extension to use native OpenSSL library instead of JSSE for better
@@ -85,11 +86,11 @@ public final class SSLSocketFactoryEx extends SSLSocketFactory {
   public static SSLSocketFactoryEx getDefaultFactory() {
     return instance;
   }
-
+/*
   static {
     OpenSSLProvider.register();
   }
-
+*/
   private SSLSocketFactoryEx(SSLChannelMode preferredChannelMode)
       throws IOException {
     try {
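With the wildfly-openssl dependency commented out of pom.xml above, the `OpenSSLProvider.register()` static block is commented out here as well, so this factory presumably falls back to the stock JSSE provider for its SSL contexts (an assumption; the patch only removes the registration). A tiny illustrative sketch of that default path, not code from the patch:

```java
import javax.net.ssl.SSLContext;

public class JsseDefaultContextSketch {
  public static void main(String[] args) throws Exception {
    // No OpenSSLProvider.register() call, so the context comes from whatever
    // provider the JRE ships (typically SunJSSE).
    SSLContext context = SSLContext.getInstance("TLSv1.2");
    context.init(null, null, null);
    System.out.println("provider = " + context.getProvider().getName());
  }
}
```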

+ 8 - 2
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.azurebfs;
 
 import java.util.UUID;
 
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -61,7 +62,7 @@ public final class ITestAbfsClient extends AbstractAbfsIntegrationTest {
   public void testUnknownHost() throws Exception {
     // When hitting hostName not found exception, the retry will take about 14 mins until failed.
     // This test is to verify that the "Unknown host name: %s. Retrying to resolve the host name..." is logged as warning during the retry.
-    AbfsConfiguration conf = this.getConfiguration();
+    final AbfsConfiguration conf = this.getConfiguration();
     String accountName = this.getAccountName();
     String fakeAccountName = "fake" + UUID.randomUUID() + accountName.substring(accountName.indexOf("."));
 
@@ -71,6 +72,11 @@ public final class ITestAbfsClient extends AbstractAbfsIntegrationTest {
 
     intercept(AbfsRestOperationException.class,
             "UnknownHostException: " + fakeAccountName,
-            () -> FileSystem.get(conf.getRawConfiguration()));
+              new LambdaTestUtils.VoidCallable() {
+                @Override
+                public void call() throws Exception {
+                  FileSystem.get(conf.getRawConfiguration());
+                }
+              });
   }
 }

+ 1 - 1
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java

@@ -239,7 +239,7 @@ public class ITestAbfsIdentityTransformer extends AbstractAbfsScaleTest{
 
     List<AclEntry> aclEntriesToBeTransformed = Lists.newArrayList(
             aclEntry(ACCESS, USER, DAEMON, ALL),
-            aclEntry(ACCESS, USER, FULLY_QUALIFIED_NAME,ALL),
+            aclEntry(ACCESS, USER, FULLY_QUALIFIED_NAME, ALL),
             aclEntry(DEFAULT, USER, SUPER_USER, ALL),
            aclEntry(DEFAULT, USER, SERVICE_PRINCIPAL_ID, ALL),
            aclEntry(DEFAULT, USER, SHORT_NAME, ALL),

+ 106 - 54
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAuthorization.java

@@ -22,6 +22,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.UUID;
 
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Test;
 
 import org.apache.hadoop.fs.Path;
@@ -69,9 +70,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
   public void testOpenFileWithInvalidPath() throws Exception {
     final AzureBlobFileSystem fs = this.getFileSystem();
     intercept(IllegalArgumentException.class,
-        ()-> {
-          fs.open(new Path("")).close();
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.open(new Path("")).close();
+          }
+        });
   }
 
   @Test
@@ -86,9 +90,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = this.getFileSystem();
     fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.open(TEST_WRITE_ONLY_FILE_PATH_0).close();
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.open(TEST_WRITE_ONLY_FILE_PATH_0).close();
+          }
+        });
   }
 
   @Test
@@ -101,9 +108,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
   public void testCreateFileUnauthorized() throws Exception {
     final AzureBlobFileSystem fs = this.getFileSystem();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.create(TEST_READ_ONLY_FILE_PATH_0).close();
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.create(TEST_READ_ONLY_FILE_PATH_0).close();
+          }
+        });
   }
 
   @Test
@@ -118,9 +128,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = this.getFileSystem();
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.append(TEST_WRITE_THEN_READ_ONLY_PATH).close();
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.append(TEST_WRITE_THEN_READ_ONLY_PATH).close();
+          }
+        });
   }
 
   @Test
@@ -133,9 +146,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
   public void testRenameUnauthorized() throws Exception {
     final AzureBlobFileSystem fs = this.getFileSystem();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.rename(TEST_WRITE_ONLY_FILE_PATH_0, TEST_WRITE_ONLY_FILE_PATH_1);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.rename(TEST_WRITE_ONLY_FILE_PATH_0, TEST_WRITE_ONLY_FILE_PATH_1);
+          }
+        });
   }
 
   @Test
@@ -150,9 +166,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = this.getFileSystem();
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.delete(TEST_WRITE_THEN_READ_ONLY_PATH, false);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.delete(TEST_WRITE_THEN_READ_ONLY_PATH, false);
+          }
+        });
   }
 
   @Test
@@ -167,9 +186,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = getFileSystem();
     fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.listStatus(TEST_WRITE_ONLY_FILE_PATH_0);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.listStatus(TEST_WRITE_ONLY_FILE_PATH_0);
+          }
+        });
   }
 
   @Test
@@ -182,9 +204,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
   public void testMkDirsUnauthorized() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.mkdirs(TEST_READ_ONLY_FOLDER_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.mkdirs(TEST_READ_ONLY_FOLDER_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
+          }
+        });
   }
 
   @Test
@@ -199,9 +224,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = getFileSystem();
     fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.getFileStatus(TEST_WRITE_ONLY_FILE_PATH_0);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.getFileStatus(TEST_WRITE_ONLY_FILE_PATH_0);
+          }
+        });
   }
 
   @Test
@@ -218,9 +246,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.setOwner(TEST_WRITE_THEN_READ_ONLY_PATH, TEST_USER, TEST_GROUP);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.setOwner(TEST_WRITE_THEN_READ_ONLY_PATH, TEST_USER, TEST_GROUP);
+          }
+        });
   }
 
   @Test
@@ -237,9 +268,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.setPermission(TEST_WRITE_THEN_READ_ONLY_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.setPermission(TEST_WRITE_THEN_READ_ONLY_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
+          }
+        });
   }
 
   @Test
@@ -256,11 +290,14 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = getFileSystem();
     assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
-    List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
+    final List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.modifyAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.modifyAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
+          }
+        });
   }
 
   @Test
@@ -277,11 +314,14 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = getFileSystem();
     assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
-    List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
+    final List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.removeAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.removeAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
+          }
+        });
   }
 
   @Test
@@ -298,9 +338,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.removeDefaultAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.removeDefaultAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
+          }
+        });
   }
 
   @Test
@@ -317,9 +360,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.removeAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.removeAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
+          }
+        });
   }
 
   @Test
@@ -336,11 +382,14 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     final AzureBlobFileSystem fs = getFileSystem();
     assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
     fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
-    List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
+    final List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.setAcl(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.setAcl(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
+          }
+        });
   }
 
   @Test
@@ -359,8 +408,11 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
     fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
     List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
     intercept(AbfsAuthorizationException.class,
-        ()-> {
-          fs.getAclStatus(TEST_WRITE_ONLY_FILE_PATH_0);
-    });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.getAclStatus(TEST_WRITE_ONLY_FILE_PATH_0);
+          }
+        });
   }
 }

+ 15 - 4
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java

@@ -26,6 +26,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Test;
 
 import org.apache.hadoop.fs.FileAlreadyExistsException;
@@ -69,12 +70,17 @@ public class ITestAzureBlobFileSystemDelete extends
   @Test()
   public void testOpenFileAfterDelete() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
-    Path testfile = new Path("/testFile");
+    final Path testfile = new Path("/testFile");
     touch(testfile);
     assertDeleted(fs, testfile, false);
 
     intercept(FileNotFoundException.class,
-        () -> fs.open(testfile));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.open(testfile);
+          }
+        });
   }
 
   @Test
@@ -122,10 +128,15 @@ public class ITestAzureBlobFileSystemDelete extends
     }
 
     es.shutdownNow();
-    Path dir = new Path("/test");
+    final Path dir = new Path("/test");
     // first try a non-recursive delete, expect failure
     intercept(FileAlreadyExistsException.class,
-        () -> fs.delete(dir, false));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.delete(dir, false);
+          }
+        });
     assertDeleted(fs, dir, true);
     assertPathDoesNotExist(fs, "deleted", dir);
 

+ 22 - 6
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemE2E.java

@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Random;
 
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
@@ -174,12 +175,17 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
     final Path testFilePath = new Path(methodName.getMethodName());
     testWriteOneByteToFile(testFilePath);
 
-    FSDataInputStream inputStream = fs.open(testFilePath, TEST_DEFAULT_BUFFER_SIZE);
+    final FSDataInputStream inputStream = fs.open(testFilePath, TEST_DEFAULT_BUFFER_SIZE);
     fs.delete(testFilePath, true);
     assertFalse(fs.exists(testFilePath));
 
     intercept(FileNotFoundException.class,
-            () -> inputStream.read(new byte[1]));
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            inputStream.read(new byte[1]);
+          }
+        });
   }
 
   @Test
@@ -187,7 +193,7 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
     final AzureBlobFileSystem fs = getFileSystem();
     final Path testFilePath = new Path(methodName.getMethodName());
 
-    FSDataOutputStream stream = fs.create(testFilePath);
+    final FSDataOutputStream stream = fs.create(testFilePath);
     assertTrue(fs.exists(testFilePath));
     stream.write(TEST_BYTE);
 
@@ -196,7 +202,12 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
 
     // trigger append call
     intercept(FileNotFoundException.class,
-            () -> stream.close());
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            stream.close();
+          }
+        });
   }
 
   @Test
@@ -204,14 +215,19 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
     final AzureBlobFileSystem fs = getFileSystem();
     final Path testFilePath = new Path(methodName.getMethodName());
 
-    FSDataOutputStream stream = fs.create(testFilePath);
+    final FSDataOutputStream stream = fs.create(testFilePath);
     assertTrue(fs.exists(testFilePath));
 
     fs.delete(testFilePath, true);
     assertFalse(fs.exists(testFilePath));
 
     intercept(FileNotFoundException.class,
-            () -> stream.close());
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            stream.close();
+          }
+        });
   }
 
   private void testWriteOneByteToFile(Path testFilePath) throws Exception {

+ 10 - 4
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemListStatus.java

@@ -27,6 +27,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Test;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -38,7 +39,7 @@ import org.apache.hadoop.fs.contract.ContractTestUtils;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.assertMkdirs;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.assertPathExists;
-import static org.apache.hadoop.fs.contract.ContractTestUtils.rename;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.assertRenameOutcome;
 
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 
@@ -136,7 +137,7 @@ public class ITestAzureBlobFileSystemListStatus extends
     assertEquals(1, fileStatuses.length);
     assertEquals("sub", fileStatuses[0].getPath().getName());
     assertIsDirectoryReference(fileStatuses[0]);
-    Path childF = fs.makeQualified(new Path("/test/f"));
+    final Path childF = fs.makeQualified(new Path("/test/f"));
     touch(childF);
     fileStatuses = fs.listStatus(testDir);
     assertEquals(2, fileStatuses.length);
@@ -154,7 +155,12 @@ public class ITestAzureBlobFileSystemListStatus extends
 
     fs.delete(testDir, true);
     intercept(FileNotFoundException.class,
-        () -> fs.listFiles(childF, false).next());
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.listFiles(childF, false).next();
+          }
+        });
 
     // do some final checks on the status (failing due to version checks)
     assertEquals("Path mismatch of " + locatedChildStatus,
@@ -228,7 +234,7 @@ public class ITestAzureBlobFileSystemListStatus extends
 
     createFile(fs, nonTrailingPeriodFile, false, new byte[0]);
     try {
-    rename(fs, nonTrailingPeriodFile, trailingPeriodFile);
+      assertRenameOutcome(fs, nonTrailingPeriodFile, trailingPeriodFile, true);
     }
     catch(IllegalArgumentException e) {
       exceptionThrown = true;
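Besides the anonymous-class rewrite, this file swaps `ContractTestUtils.rename` for `assertRenameOutcome`, presumably the helper available on branch-2; it performs the rename and asserts on the expected boolean result in one call. A hedged sketch against the local filesystem (the paths are illustrative, not from the patch):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import static org.apache.hadoop.fs.contract.ContractTestUtils.assertRenameOutcome;
import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;

public class RenameOutcomeSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path src = new Path("/tmp/rename-outcome-src");
    Path dst = new Path("/tmp/rename-outcome-dst");
    touch(fs, src);
    // Renames src to dst and fails the assertion if rename() does not return true.
    assertRenameOutcome(fs, src, dst, true);
    System.out.println("renamed to " + dst);
  }
}
```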

+ 15 - 8
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestGetNameSpaceEnabled.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.azurebfs;
 import java.io.IOException;
 import java.util.UUID;
 
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.Assume;
 import org.junit.Test;
 
@@ -65,13 +66,16 @@ public class ITestGetNameSpaceEnabled extends AbstractAbfsIntegrationTest {
     String testUri = this.getTestUrl();
     String nonExistingFsUrl = getAbfsScheme() + "://" + UUID.randomUUID()
             + testUri.substring(testUri.indexOf("@"));
-    AzureBlobFileSystem fs = this.getFileSystem(nonExistingFsUrl);
+    final AzureBlobFileSystem fs = this.getFileSystem(nonExistingFsUrl);
 
     intercept(AbfsRestOperationException.class,
             "\"The specified filesystem does not exist.\", 404",
-            ()-> {
-              fs.getIsNamespaceEnabled();
-            });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.getIsNamespaceEnabled();
+          }
+        });
   }
 
   @Test
@@ -86,11 +90,14 @@ public class ITestGetNameSpaceEnabled extends AbstractAbfsIntegrationTest {
     secret = (char) (secret.charAt(0) + 1) + secret.substring(1);
     config.set(configkKey, secret);
 
-    AzureBlobFileSystem fs = this.getFileSystem(config);
+    final AzureBlobFileSystem fs = this.getFileSystem(config);
     intercept(AbfsRestOperationException.class,
             "\"Server failed to authenticate the request. Make sure the value of Authorization header is formed correctly including the signature.\", 403",
-            ()-> {
-              fs.getIsNamespaceEnabled();
-            });
+        new LambdaTestUtils.VoidCallable() {
+          @Override
+          public void call() throws Exception {
+            fs.getIsNamespaceEnabled();
+          }
+        });
   }
 }

+ 3 - 3
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.fs.azurebfs;
 
 import java.io.IOException;
 import java.lang.reflect.Field;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.codec.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
 import org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys;
@@ -98,8 +98,8 @@ public class TestAbfsConfigurationFieldsValidation {
   public TestAbfsConfigurationFieldsValidation() throws Exception {
     super();
     this.accountName = "testaccount1.blob.core.windows.net";
-    this.encodedString = Base64.encode("base64Value".getBytes(Charsets.UTF_8));
-    this.encodedAccountKey = Base64.encode("someAccountKey".getBytes(Charsets.UTF_8));
+    this.encodedString = Base64.encode("base64Value".getBytes(StandardCharsets.UTF_8));
+    this.encodedAccountKey = Base64.encode("someAccountKey".getBytes(StandardCharsets.UTF_8));
     Configuration configuration = new Configuration();
     configuration.addResource(TestConfigurationKeys.TEST_CONFIGURATION_FILE_NAME);
     configuration.set(INT_KEY, "1234565");
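The swap above from commons-codec's `Charsets` to `java.nio.charset.StandardCharsets` (JDK 7+) produces identical UTF-8 bytes while dropping the extra import; a minimal sketch, with the string literal purely illustrative:

```java
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class StandardCharsetsSketch {
  public static void main(String[] args) {
    // Same bytes as "...".getBytes(org.apache.commons.codec.Charsets.UTF_8),
    // but with no dependency beyond the JDK.
    byte[] utf8 = "base64Value".getBytes(StandardCharsets.UTF_8);
    System.out.println(Arrays.toString(utf8));
  }
}
```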

+ 8 - 5
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockAbfsAuthorizer.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.fs.azurebfs.extensions;
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -46,6 +44,8 @@ public class MockAbfsAuthorizer implements AbfsAuthorizer {
   private Set<Path> readOnlyPaths = new HashSet<Path>();
   private Set<Path> writeOnlyPaths = new HashSet<Path>();
   private Set<Path> readWritePaths = new HashSet<Path>();
+  private Set<Path> readPaths = new HashSet<>();
+  private Set<Path> writePaths = new HashSet<>();
   private int writeThenReadOnly = 0;
   public MockAbfsAuthorizer(Configuration conf) {
     this.conf = conf;
@@ -61,6 +61,10 @@ public class MockAbfsAuthorizer implements AbfsAuthorizer {
     writeOnlyPaths.add(new Path(TEST_WRITE_ONLY_FOLDER));
     readWritePaths.add(new Path(TEST_READ_WRITE_FILE_0));
     readWritePaths.add(new Path(TEST_READ_WRITE_FILE_1));
+    readPaths.addAll(readOnlyPaths);
+    readPaths.addAll(readWritePaths);
+    writePaths.addAll(writeOnlyPaths);
+    writePaths.addAll(readWritePaths);
   }
 
   @Override
@@ -70,12 +74,11 @@ public class MockAbfsAuthorizer implements AbfsAuthorizer {
       paths.add(new Path(path.getName()));
     }
 
-    if (action.equals(FsAction.READ) && Stream.concat(readOnlyPaths.stream(), readWritePaths.stream()).collect(Collectors.toSet()).containsAll(paths)) {
+    if (action.equals(FsAction.READ) && readPaths.containsAll(paths)) {
       return true;
     } else if (action.equals(FsAction.READ) && paths.contains(new Path(TEST_WRITE_THEN_READ_ONLY)) && writeThenReadOnly == 1) {
       return true;
-    } else if (action.equals(FsAction.WRITE)
-        && Stream.concat(writeOnlyPaths.stream(), readWritePaths.stream()).collect(Collectors.toSet()).containsAll(paths)) {
+    } else if (action.equals(FsAction.WRITE) && writePaths.containsAll(paths)) {
       return true;
     } else if (action.equals(FsAction.WRITE) && paths.contains(new Path(TEST_WRITE_THEN_READ_ONLY)) && writeThenReadOnly == 0) {
       writeThenReadOnly = 1;
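The MockAbfsAuthorizer change replaces the Java 8 `Stream.concat(...).collect(Collectors.toSet())` unions with `readPaths`/`writePaths` sets built once in the constructor, so each access check becomes a plain `containsAll`. A self-contained sketch of the same idea, using illustrative string paths instead of the test constants:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class PrecomputedUnionSetsSketch {
  public static void main(String[] args) {
    Set<String> readOnlyPaths = new HashSet<String>(Arrays.asList("/readOnlyFile"));
    Set<String> readWritePaths = new HashSet<String>(Arrays.asList("/readWriteFile"));

    // Java 7-friendly replacement for
    // Stream.concat(readOnlyPaths.stream(), readWritePaths.stream()).collect(Collectors.toSet()):
    Set<String> readPaths = new HashSet<String>();
    readPaths.addAll(readOnlyPaths);
    readPaths.addAll(readWritePaths);

    // The authorization check then reduces to containsAll against the precomputed set.
    System.out.println(readPaths.containsAll(Arrays.asList("/readWriteFile", "/readOnlyFile")));
  }
}
```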