
HADOOP-19367. Fix setting final field value on Java 17 (#7228)

(cherry picked from commit 6cb2e866a6cb1261daabc9314b0047f152824c94)
Cheng Pan 4 months ago
Parent
Current commit
46c1ee1a38

+ 25 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/ReflectionUtils.java

@@ -18,6 +18,8 @@
 package org.apache.hadoop.test;
 
 import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
 
 public final class ReflectionUtils {
   private ReflectionUtils() {}
@@ -48,4 +50,27 @@ public final class ReflectionUtils {
       return null;
     }
   }
+
+  public static <T> void setFinalField(
+          Class<T> type, final T obj, final String fieldName, Object value)
+          throws ReflectiveOperationException {
+    Field f = type.getDeclaredField(fieldName);
+    f.setAccessible(true);
+    Field modifiersField = ReflectionUtils.getModifiersField();
+    modifiersField.setAccessible(true);
+    modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL);
+    f.set(obj, value);
+  }
+
+  public static Field getModifiersField() throws ReflectiveOperationException {
+    Method getDeclaredFields0 = Class.class.getDeclaredMethod("getDeclaredFields0", boolean.class);
+    getDeclaredFields0.setAccessible(true);
+    Field[] fields = (Field[]) getDeclaredFields0.invoke(Field.class, false);
+    for (Field each : fields) {
+      if ("modifiers".equals(each.getName())) {
+        return each;
+      }
+    }
+    throw new UnsupportedOperationException();
+  }
 }
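
Background on the hunk above: Java 12 and later (JDK-8210522) filter the "modifiers" field out of Field.class reflection, so the old Field.class.getDeclaredField("modifiers") trick used throughout the tests below throws NoSuchFieldException on Java 17. The new setFinalField helper instead reaches the unfiltered field list through the private Class#getDeclaredFields0 method; on JDK 17 this generally still depends on --add-opens for java.base/java.lang and java.base/java.lang.reflect being passed to the test JVM. A minimal usage sketch, assuming a hypothetical DummyHolder class that is not part of the patch:

    import static org.junit.Assert.assertEquals;

    import org.apache.hadoop.test.ReflectionUtils;
    import org.junit.Test;

    public class DummyHolderTest {
      // Hypothetical class used only for illustration.
      static class DummyHolder {
        private final String label;
        DummyHolder(String label) {
          this.label = label;
        }
        String getLabel() {
          return label;
        }
      }

      @Test
      public void testOverwriteFinalField() throws ReflectiveOperationException {
        DummyHolder holder = new DummyHolder("before");
        // Clears the FINAL bit through the unfiltered "modifiers" Field
        // (obtained via getDeclaredFields0), then writes the new value.
        ReflectionUtils.setFinalField(DummyHolder.class, holder, "label", "after");
        assertEquals("after", holder.getLabel());
      }
    }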

+ 6 - 14
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java

@@ -46,8 +46,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.UnknownHostException;
@@ -90,6 +88,7 @@ import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.junit.Assert;
 import org.junit.Test;
@@ -715,7 +714,7 @@ public class TestFileCreation {
    */
   @Test
   public void testFileCreationNamenodeRestart()
-      throws IOException, NoSuchFieldException, IllegalAccessException {
+      throws IOException, ReflectiveOperationException {
     Configuration conf = new HdfsConfiguration();
     final int MAX_IDLE_TIME = 2000; // 2s
     conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
@@ -812,20 +811,13 @@ public class TestFileCreation {
 
       // instruct the dfsclient to use a new filename when it requests
       // new blocks for files that were renamed.
-      DFSOutputStream dfstream = (DFSOutputStream)
-                                                 (stm.getWrappedStream());
+      DFSOutputStream dfstream = (DFSOutputStream) (stm.getWrappedStream());
 
-      Field f = DFSOutputStream.class.getDeclaredField("src");
-      Field modifiersField = Field.class.getDeclaredField("modifiers");
-      modifiersField.setAccessible(true);
-      modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL);
-      f.setAccessible(true);
-
-      f.set(dfstream, file1.toString());
+      ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file1.toString());
       dfstream = (DFSOutputStream) (stm3.getWrappedStream());
-      f.set(dfstream, file3new.toString());
+      ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file3new.toString());
       dfstream = (DFSOutputStream) (stm4.getWrappedStream());
-      f.set(dfstream, file4new.toString());
+      ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file4new.toString());
 
       // write 1 byte to file.  This should succeed because the 
       // namenode should have persisted leases.

+ 8 - 16
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStoragePolicyPermissionSettings.java

@@ -21,8 +21,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 
 import java.io.IOException;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -34,6 +32,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.LambdaTestUtils;
+import org.apache.hadoop.test.ReflectionUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -77,22 +76,15 @@ public class TestStoragePolicyPermissionSettings {
     }
   }
 
-  private void setFSNameSystemFinalField(String field, boolean value)
-      throws NoSuchFieldException, IllegalAccessException {
-    Field f = FSNamesystem.class.getDeclaredField(field);
-    f.setAccessible(true);
-    Field modifiersField = Field.class.getDeclaredField("modifiers");
-    modifiersField.setAccessible(true);
-    modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL);
-    f.set(cluster.getNamesystem(), value);
-  }
-
   private void setStoragePolicyPermissions(boolean isStoragePolicyEnabled,
                                            boolean isStoragePolicySuperuserOnly)
-      throws NoSuchFieldException, IllegalAccessException {
-    setFSNameSystemFinalField("isStoragePolicyEnabled", isStoragePolicyEnabled);
-    setFSNameSystemFinalField("isStoragePolicySuperuserOnly",
-        isStoragePolicySuperuserOnly);
+      throws ReflectiveOperationException {
+    ReflectionUtils.setFinalField(
+        FSNamesystem.class, cluster.getNamesystem(),
+        "isStoragePolicyEnabled", isStoragePolicyEnabled);
+    ReflectionUtils.setFinalField(
+        FSNamesystem.class, cluster.getNamesystem(),
+        "isStoragePolicySuperuserOnly", isStoragePolicySuperuserOnly);
   }
 
   @Test

+ 4 - 18
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCreate.java

@@ -21,7 +21,6 @@ package org.apache.hadoop.fs.azurebfs;
 import java.io.FileNotFoundException;
 import java.io.FilterOutputStream;
 import java.io.IOException;
-import java.lang.reflect.Field;
 import java.util.EnumSet;
 import java.util.UUID;
 
@@ -40,6 +39,7 @@ import org.apache.hadoop.fs.azurebfs.security.ContextEncryptionAdapter;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.ReflectionUtils;
 
 import org.apache.hadoop.fs.azurebfs.constants.FSOperationType;
 import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsRestOperationException;
@@ -395,7 +395,9 @@ public class ITestAzureBlobFileSystemCreate extends
         fs.getAbfsStore().getAbfsConfiguration());
 
     AzureBlobFileSystemStore abfsStore = fs.getAbfsStore();
-    abfsStore = setAzureBlobSystemStoreField(abfsStore, "client", mockClient);
+
+    ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, abfsStore, "client", mockClient);
+
     boolean isNamespaceEnabled = abfsStore
         .getIsNamespaceEnabled(getTestTracingContext(fs, false));
 
@@ -486,22 +488,6 @@ public class ITestAzureBlobFileSystemCreate extends
     validateCreateFileException(AbfsRestOperationException.class, abfsStore);
   }
 
-  private AzureBlobFileSystemStore setAzureBlobSystemStoreField(
-      final AzureBlobFileSystemStore abfsStore,
-      final String fieldName,
-      Object fieldObject) throws Exception {
-
-    Field abfsClientField = AzureBlobFileSystemStore.class.getDeclaredField(
-        fieldName);
-    abfsClientField.setAccessible(true);
-    Field modifiersField = Field.class.getDeclaredField("modifiers");
-    modifiersField.setAccessible(true);
-    modifiersField.setInt(abfsClientField,
-        abfsClientField.getModifiers() & ~java.lang.reflect.Modifier.FINAL);
-    abfsClientField.set(abfsStore, fieldObject);
-    return abfsStore;
-  }
-
   private <E extends Throwable> void validateCreateFileException(final Class<E> exceptionClass, final AzureBlobFileSystemStore abfsStore)
       throws Exception {
     FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL,

+ 3 - 3
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java

@@ -41,12 +41,12 @@ import org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation;
 import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation;
 import org.apache.hadoop.fs.azurebfs.services.ITestAbfsClient;
 import org.apache.hadoop.fs.azurebfs.services.TestAbfsPerfTracker;
-import org.apache.hadoop.fs.azurebfs.utils.TestMockHelpers;
 import org.apache.hadoop.fs.azurebfs.utils.TracingContext;
 import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderValidator;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.ReflectionUtils;
 
 import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
 import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
@@ -250,9 +250,9 @@ public class ITestAzureBlobFileSystemDelete extends
         fs.getAbfsStore().getClient(),
         this.getConfiguration());
     AzureBlobFileSystemStore mockStore = mock(AzureBlobFileSystemStore.class);
-    mockStore = TestMockHelpers.setClassField(AzureBlobFileSystemStore.class, mockStore,
+    ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, mockStore,
         "client", mockClient);
-    mockStore = TestMockHelpers.setClassField(AzureBlobFileSystemStore.class,
+    ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class,
         mockStore,
         "abfsPerfTracker",
         TestAbfsPerfTracker.getAPerfTrackerInstance(this.getConfiguration()));
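
One behavioral note on the migration visible above: the removed TestMockHelpers.setClassField returned the mutated object, whereas ReflectionUtils.setFinalField returns void and mutates the instance in place, so the "mockStore = ..." reassignment is simply dropped. A before/after sketch using the names from this hunk:

    // Before (removed helper): the mutated object was returned and reassigned.
    mockStore = TestMockHelpers.setClassField(AzureBlobFileSystemStore.class,
        mockStore, "client", mockClient);

    // After (new helper): void return; the same mock instance is mutated in place.
    ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, mockStore,
        "client", mockClient);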

+ 9 - 28
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsClient.java

@@ -19,7 +19,6 @@
 package org.apache.hadoop.fs.azurebfs.services;
 
 import java.io.IOException;
-import java.lang.reflect.Field;
 import java.net.ProtocolException;
 import java.net.URL;
 import java.util.Arrays;
@@ -34,9 +33,11 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.mockito.Mockito;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
+import org.apache.hadoop.fs.azurebfs.AbfsCountersImpl;
 import org.apache.hadoop.fs.azurebfs.AbstractAbfsIntegrationTest;
 import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem;
 import org.apache.hadoop.fs.azurebfs.TestAbfsConfigurationFieldsValidation;
@@ -46,11 +47,11 @@ import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsApacheHttpExpect10
 import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AzureBlobFileSystemException;
 import org.apache.hadoop.fs.azurebfs.contracts.services.AppendRequestParameters;
 import org.apache.hadoop.fs.azurebfs.oauth2.AccessTokenProvider;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
 import org.apache.hadoop.fs.azurebfs.utils.TracingContext;
 import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderFormat;
 import org.apache.hadoop.security.ssl.DelegatingSSLSocketFactory;
+import org.apache.hadoop.test.ReflectionUtils;
 import org.apache.http.HttpResponse;
 
 import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
@@ -410,50 +411,30 @@ public final class ITestAbfsClient extends AbstractAbfsIntegrationTest {
     Mockito.doReturn(baseAbfsClientInstance.getAbfsApacheHttpClient()).when(client).getAbfsApacheHttpClient();
 
     // override baseurl
-    client = ITestAbfsClient.setAbfsClientField(client, "abfsConfiguration",
-        abfsConfig);
+    ReflectionUtils.setFinalField(AbfsClient.class, client, "abfsConfiguration", abfsConfig);
 
     // override baseurl
-    client = ITestAbfsClient.setAbfsClientField(client, "baseUrl",
-        baseAbfsClientInstance.getBaseUrl());
+    ReflectionUtils.setFinalField(AbfsClient.class, client, "baseUrl", baseAbfsClientInstance.getBaseUrl());
 
     // override xMsVersion
-    client = ITestAbfsClient.setAbfsClientField(client, "xMsVersion",
-        baseAbfsClientInstance.getxMsVersion());
+    ReflectionUtils.setFinalField(AbfsClient.class, client, "xMsVersion", baseAbfsClientInstance.getxMsVersion());
 
     // override auth provider
     if (currentAuthType == AuthType.SharedKey) {
-      client = ITestAbfsClient.setAbfsClientField(client, "sharedKeyCredentials",
-          new SharedKeyCredentials(
+      ReflectionUtils.setFinalField(AbfsClient.class, client, "sharedKeyCredentials", new SharedKeyCredentials(
               abfsConfig.getAccountName().substring(0,
                   abfsConfig.getAccountName().indexOf(DOT)),
               abfsConfig.getStorageAccountKey()));
     } else {
-      client = ITestAbfsClient.setAbfsClientField(client, "tokenProvider",
-          abfsConfig.getTokenProvider());
+      ReflectionUtils.setFinalField(AbfsClient.class, client, "tokenProvider", abfsConfig.getTokenProvider());
     }
 
     // override user agent
     String userAgent = "APN/1.0 Azure Blob FS/3.4.1-SNAPSHOT (PrivateBuild "
         + "JavaJRE 1.8.0_252; Linux 5.3.0-59-generic/amd64; openssl-1.0; "
         + "UNKNOWN/UNKNOWN) MSFT";
-    client = ITestAbfsClient.setAbfsClientField(client, "userAgent", userAgent);
-
-    return client;
-  }
+    ReflectionUtils.setFinalField(AbfsClient.class, client, "userAgent", userAgent);
 
-  static AbfsClient setAbfsClientField(
-      final AbfsClient client,
-      final String fieldName,
-      Object fieldObject) throws Exception {
-
-    Field field = AbfsClient.class.getDeclaredField(fieldName);
-    field.setAccessible(true);
-    Field modifiersField = Field.class.getDeclaredField("modifiers");
-    modifiersField.setAccessible(true);
-    modifiersField.setInt(field,
-        field.getModifiers() & ~java.lang.reflect.Modifier.FINAL);
-    field.set(client, fieldObject);
     return client;
   }
 

+ 2 - 1
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsPaginatedDelete.java

@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclEntryScope;
 import org.apache.hadoop.fs.permission.AclEntryType;
 import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.test.ReflectionUtils;
 import org.apache.hadoop.util.Lists;
 
 import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
@@ -194,7 +195,7 @@ public class ITestAbfsPaginatedDelete extends AbstractAbfsIntegrationTest {
 
     // Set the paginated enabled value and xMsVersion at spiedClient level.
     AbfsClient spiedClient = Mockito.spy(fs.getAbfsStore().getClient());
-    ITestAbfsClient.setAbfsClientField(spiedClient, "xMsVersion", xMsVersion);
+    ReflectionUtils.setFinalField(AbfsClient.class, spiedClient, "xMsVersion", xMsVersion);
     Mockito.doReturn(isPaginatedDeleteEnabled).when(spiedClient).getIsPaginatedDeleteEnabled();
 
     AbfsRestOperation op = spiedClient.deletePath(

+ 0 - 59
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/TestMockHelpers.java

@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.azurebfs.utils;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-
-/**
- * Test Mock Helpers.
- */
-public final class TestMockHelpers {
-
-  /**
-   * Sets a class field by reflection.
-   * @param type
-   * @param obj
-   * @param fieldName
-   * @param fieldObject
-   * @param <T>
-   * @return
-   * @throws Exception
-   */
-  public static <T> T setClassField(
-      Class<T> type,
-      final T obj,
-      final String fieldName,
-      Object fieldObject) throws Exception {
-
-    Field field = type.getDeclaredField(fieldName);
-    field.setAccessible(true);
-    Field modifiersField = Field.class.getDeclaredField("modifiers");
-    modifiersField.setAccessible(true);
-    modifiersField.setInt(field,
-        field.getModifiers() & ~Modifier.FINAL);
-    field.set(obj, fieldObject);
-
-    return obj;
-  }
-
-  private TestMockHelpers() {
-    // Not called. - For checkstyle: HideUtilityClassConstructor
-  }
-}

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java

@@ -37,6 +37,7 @@ import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.impl.MetricsCollectorImpl;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.ReflectionUtils;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.metrics.GenericEventTypeMetrics;
@@ -183,7 +184,7 @@ public class TestAsyncDispatcher {
 
     Field logger = AsyncDispatcher.class.getDeclaredField("LOG");
     logger.setAccessible(true);
-    Field modifiers = Field.class.getDeclaredField("modifiers");
+    Field modifiers = ReflectionUtils.getModifiersField();
     modifiers.setAccessible(true);
     modifiers.setInt(logger, logger.getModifiers() & ~Modifier.FINAL);
     Object oldLog = logger.get(null);
@@ -229,7 +230,7 @@ public class TestAsyncDispatcher {
 
     Field logger = AsyncDispatcher.class.getDeclaredField("LOG");
     logger.setAccessible(true);
-    Field modifiers = Field.class.getDeclaredField("modifiers");
+    Field modifiers = ReflectionUtils.getModifiersField();
     modifiers.setAccessible(true);
     modifiers.setInt(logger, logger.getModifiers() & ~Modifier.FINAL);
     Object oldLog = logger.get(null);
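
The two TestAsyncDispatcher hunks keep the manual modifier-clearing sequence and only swap in ReflectionUtils.getModifiersField(), because the test needs the Field object itself to replace the static final LOG logger and restore it afterwards. A sketch of that pattern, assuming a hypothetical SomeService class with a static final SLF4J logger (not taken from this commit):

    import java.lang.reflect.Field;
    import java.lang.reflect.Modifier;

    import org.apache.hadoop.test.ReflectionUtils;
    import org.mockito.Mockito;
    import org.slf4j.Logger;

    // Inside a test method:
    Field logger = SomeService.class.getDeclaredField("LOG");
    logger.setAccessible(true);
    // Replaces the Field.class.getDeclaredField("modifiers") lookup that
    // throws NoSuchFieldException on Java 12+.
    Field modifiers = ReflectionUtils.getModifiersField();
    modifiers.setAccessible(true);
    modifiers.setInt(logger, logger.getModifiers() & ~Modifier.FINAL);
    Object oldLog = logger.get(null);        // remember the original logger
    try {
      logger.set(null, Mockito.mock(Logger.class));  // install a mock for the test
      // ... exercise SomeService and verify the expected log interactions ...
    } finally {
      logger.set(null, oldLog);              // restore the original logger
    }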