diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/ReflectionUtils.java index a572f4ae2ee2d..d74dc77d486c2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/ReflectionUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/ReflectionUtils.java @@ -18,6 +18,8 @@ package org.apache.hadoop.test; import java.lang.reflect.Field; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; public final class ReflectionUtils { private ReflectionUtils() {} @@ -48,4 +50,27 @@ public static String getStringValueOfField(Field f) throws IllegalAccessExceptio return null; } } + + public static <T> void setFinalField( + Class<T> type, final T obj, final String fieldName, Object value) + throws ReflectiveOperationException { + Field f = type.getDeclaredField(fieldName); + f.setAccessible(true); + Field modifiersField = ReflectionUtils.getModifiersField(); + modifiersField.setAccessible(true); + modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL); + f.set(obj, value); + } + + public static Field getModifiersField() throws ReflectiveOperationException { + Method getDeclaredFields0 = Class.class.getDeclaredMethod("getDeclaredFields0", boolean.class); + getDeclaredFields0.setAccessible(true); + Field[] fields = (Field[]) getDeclaredFields0.invoke(Field.class, false); + for (Field each : fields) { + if ("modifiers".equals(each.getName())) { + return each; + } + } + throw new UnsupportedOperationException(); + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java index 44d6052632d82..3d0bbf93a4ac1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java +++
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java @@ -46,8 +46,6 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; import java.net.InetSocketAddress; import java.net.URI; import java.net.UnknownHostException; @@ -90,6 +88,7 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.test.GenericTestUtils; +import org.apache.hadoop.test.ReflectionUtils; import org.apache.hadoop.util.Time; import org.junit.Assert; import org.junit.Test; @@ -715,7 +714,7 @@ public void testFileCreationError3() throws IOException { */ @Test public void testFileCreationNamenodeRestart() - throws IOException, NoSuchFieldException, IllegalAccessException { + throws IOException, ReflectiveOperationException { Configuration conf = new HdfsConfiguration(); final int MAX_IDLE_TIME = 2000; // 2s conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME); @@ -812,20 +811,13 @@ public void testFileCreationNamenodeRestart() // instruct the dfsclient to use a new filename when it requests // new blocks for files that were renamed. 
- DFSOutputStream dfstream = (DFSOutputStream) - (stm.getWrappedStream()); + DFSOutputStream dfstream = (DFSOutputStream) (stm.getWrappedStream()); - Field f = DFSOutputStream.class.getDeclaredField("src"); - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL); - f.setAccessible(true); - - f.set(dfstream, file1.toString()); + ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file1.toString()); dfstream = (DFSOutputStream) (stm3.getWrappedStream()); - f.set(dfstream, file3new.toString()); + ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file3new.toString()); dfstream = (DFSOutputStream) (stm4.getWrappedStream()); - f.set(dfstream, file4new.toString()); + ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file4new.toString()); // write 1 byte to file. This should succeed because the // namenode should have persisted leases. 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStoragePolicyPermissionSettings.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStoragePolicyPermissionSettings.java index 81f9126a4f4bc..3ab6d96595b36 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStoragePolicyPermissionSettings.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStoragePolicyPermissionSettings.java @@ -21,8 +21,6 @@ import static org.junit.Assert.assertNotEquals; import java.io.IOException; -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -34,6 +32,7 @@ import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.test.LambdaTestUtils; +import org.apache.hadoop.test.ReflectionUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -77,22 +76,15 @@ public static void clusterShutdown() throws IOException { } } - private void setFSNameSystemFinalField(String field, boolean value) - throws NoSuchFieldException, IllegalAccessException { - Field f = FSNamesystem.class.getDeclaredField(field); - f.setAccessible(true); - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL); - f.set(cluster.getNamesystem(), value); - } - private void setStoragePolicyPermissions(boolean isStoragePolicyEnabled, boolean isStoragePolicySuperuserOnly) - throws NoSuchFieldException, IllegalAccessException { - setFSNameSystemFinalField("isStoragePolicyEnabled", isStoragePolicyEnabled); - setFSNameSystemFinalField("isStoragePolicySuperuserOnly", - isStoragePolicySuperuserOnly); + throws ReflectiveOperationException { + ReflectionUtils.setFinalField( + 
FSNamesystem.class, cluster.getNamesystem(), + "isStoragePolicyEnabled", isStoragePolicyEnabled); + ReflectionUtils.setFinalField( + FSNamesystem.class, cluster.getNamesystem(), + "isStoragePolicySuperuserOnly", isStoragePolicySuperuserOnly); } @Test diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCreate.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCreate.java index 98e98953c40fa..deaaa46346f50 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCreate.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCreate.java @@ -21,7 +21,6 @@ import java.io.FileNotFoundException; import java.io.FilterOutputStream; import java.io.IOException; -import java.lang.reflect.Field; import java.util.EnumSet; import java.util.UUID; @@ -40,6 +39,7 @@ import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; +import org.apache.hadoop.test.ReflectionUtils; import org.apache.hadoop.fs.azurebfs.constants.FSOperationType; import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsRestOperationException; @@ -395,7 +395,9 @@ public void testNegativeScenariosForCreateOverwriteDisabled() fs.getAbfsStore().getAbfsConfiguration()); AzureBlobFileSystemStore abfsStore = fs.getAbfsStore(); - abfsStore = setAzureBlobSystemStoreField(abfsStore, "client", mockClient); + + ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, abfsStore, "client", mockClient); + boolean isNamespaceEnabled = abfsStore .getIsNamespaceEnabled(getTestTracingContext(fs, false)); @@ -486,22 +488,6 @@ public void testNegativeScenariosForCreateOverwriteDisabled() validateCreateFileException(AbfsRestOperationException.class, abfsStore); } - private AzureBlobFileSystemStore setAzureBlobSystemStoreField( - final 
AzureBlobFileSystemStore abfsStore, - final String fieldName, - Object fieldObject) throws Exception { - - Field abfsClientField = AzureBlobFileSystemStore.class.getDeclaredField( - fieldName); - abfsClientField.setAccessible(true); - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(abfsClientField, - abfsClientField.getModifiers() & ~java.lang.reflect.Modifier.FINAL); - abfsClientField.set(abfsStore, fieldObject); - return abfsStore; - } - private void validateCreateFileException(final Class exceptionClass, final AzureBlobFileSystemStore abfsStore) throws Exception { FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java index 9e42f6ba14953..e559a03454bf8 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java @@ -41,12 +41,12 @@ import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation; import org.apache.hadoop.fs.azurebfs.services.ITestAbfsClient; import org.apache.hadoop.fs.azurebfs.services.TestAbfsPerfTracker; -import org.apache.hadoop.fs.azurebfs.utils.TestMockHelpers; import org.apache.hadoop.fs.azurebfs.utils.TracingContext; import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderValidator; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.test.ReflectionUtils; import static java.net.HttpURLConnection.HTTP_BAD_REQUEST; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; @@ -250,9 +250,9 @@ public void testDeleteIdempotencyTriggerHttp404() throws 
Exception { fs.getAbfsStore().getClient(), this.getConfiguration()); AzureBlobFileSystemStore mockStore = mock(AzureBlobFileSystemStore.class); - mockStore = TestMockHelpers.setClassField(AzureBlobFileSystemStore.class, mockStore, + ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, mockStore, "client", mockClient); - mockStore = TestMockHelpers.setClassField(AzureBlobFileSystemStore.class, + ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, mockStore, "abfsPerfTracker", TestAbfsPerfTracker.getAPerfTrackerInstance(this.getConfiguration())); diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsClient.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsClient.java index 3eae1401998b6..ed1bfc38cbbf1 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsClient.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsClient.java @@ -19,7 +19,6 @@ package org.apache.hadoop.fs.azurebfs.services; import java.io.IOException; -import java.lang.reflect.Field; import java.net.ProtocolException; import java.net.URI; import java.net.URISyntaxException; @@ -29,7 +28,6 @@ import java.util.Random; import java.util.regex.Pattern; -import org.apache.hadoop.fs.azurebfs.AbfsCountersImpl; import org.assertj.core.api.Assertions; import org.junit.Assume; import org.junit.Test; @@ -37,9 +35,11 @@ import org.junit.runners.Parameterized; import org.mockito.Mockito; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.azurebfs.AbfsConfiguration; +import org.apache.hadoop.fs.azurebfs.AbfsCountersImpl; import org.apache.hadoop.fs.azurebfs.AbstractAbfsIntegrationTest; import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem; import org.apache.hadoop.fs.azurebfs.TestAbfsConfigurationFieldsValidation; @@ 
-49,11 +49,11 @@ import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AzureBlobFileSystemException; import org.apache.hadoop.fs.azurebfs.contracts.services.AppendRequestParameters; import org.apache.hadoop.fs.azurebfs.oauth2.AccessTokenProvider; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys; import org.apache.hadoop.fs.azurebfs.utils.TracingContext; import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderFormat; import org.apache.hadoop.security.ssl.DelegatingSSLSocketFactory; +import org.apache.hadoop.test.ReflectionUtils; import org.apache.http.HttpResponse; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; @@ -423,50 +423,30 @@ public static AbfsClient getMockAbfsClient(AbfsClient baseAbfsClientInstance, Mockito.doReturn(baseAbfsClientInstance.getAbfsApacheHttpClient()).when(client).getAbfsApacheHttpClient(); // override baseurl - client = ITestAbfsClient.setAbfsClientField(client, "abfsConfiguration", - abfsConfig); + ReflectionUtils.setFinalField(AbfsClient.class, client, "abfsConfiguration", abfsConfig); // override baseurl - client = ITestAbfsClient.setAbfsClientField(client, "baseUrl", - baseAbfsClientInstance.getBaseUrl()); + ReflectionUtils.setFinalField(AbfsClient.class, client, "baseUrl", baseAbfsClientInstance.getBaseUrl()); // override xMsVersion - client = ITestAbfsClient.setAbfsClientField(client, "xMsVersion", - baseAbfsClientInstance.getxMsVersion()); + ReflectionUtils.setFinalField(AbfsClient.class, client, "xMsVersion", baseAbfsClientInstance.getxMsVersion()); // override auth provider if (currentAuthType == AuthType.SharedKey) { - client = ITestAbfsClient.setAbfsClientField(client, "sharedKeyCredentials", - new SharedKeyCredentials( + ReflectionUtils.setFinalField(AbfsClient.class, client, "sharedKeyCredentials", new SharedKeyCredentials( abfsConfig.getAccountName().substring(0, abfsConfig.getAccountName().indexOf(DOT)), abfsConfig.getStorageAccountKey())); } else { - 
client = ITestAbfsClient.setAbfsClientField(client, "tokenProvider", - abfsConfig.getTokenProvider()); + ReflectionUtils.setFinalField(AbfsClient.class, client, "tokenProvider", abfsConfig.getTokenProvider()); } // override user agent String userAgent = "APN/1.0 Azure Blob FS/3.5.0-SNAPSHOT (PrivateBuild " + "JavaJRE 1.8.0_252; Linux 5.3.0-59-generic/amd64; openssl-1.0; " + "UNKNOWN/UNKNOWN) MSFT"; - client = ITestAbfsClient.setAbfsClientField(client, "userAgent", userAgent); - - return client; - } + ReflectionUtils.setFinalField(AbfsClient.class, client, "userAgent", userAgent); - static AbfsClient setAbfsClientField( - final AbfsClient client, - final String fieldName, - Object fieldObject) throws Exception { - - Field field = AbfsClient.class.getDeclaredField(fieldName); - field.setAccessible(true); - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(field, - field.getModifiers() & ~java.lang.reflect.Modifier.FINAL); - field.set(client, fieldObject); return client; } diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsPaginatedDelete.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsPaginatedDelete.java index 5dd92f430e059..8c1fcee5f6fa8 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsPaginatedDelete.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsPaginatedDelete.java @@ -41,6 +41,7 @@ import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclEntryType; import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.test.ReflectionUtils; import org.apache.hadoop.util.Lists; import static java.net.HttpURLConnection.HTTP_BAD_REQUEST; @@ -194,7 +195,7 @@ private void testRecursiveDeleteWithPaginationInternal(boolean isEmptyDir, // Set the 
paginated enabled value and xMsVersion at spiedClient level. AbfsClient spiedClient = Mockito.spy(fs.getAbfsStore().getClient()); - ITestAbfsClient.setAbfsClientField(spiedClient, "xMsVersion", xMsVersion); + ReflectionUtils.setFinalField(AbfsClient.class, spiedClient, "xMsVersion", xMsVersion); Mockito.doReturn(isPaginatedDeleteEnabled).when(spiedClient).getIsPaginatedDeleteEnabled(); AbfsRestOperation op = spiedClient.deletePath( diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/TestMockHelpers.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/TestMockHelpers.java deleted file mode 100644 index e25a099a00ef3..0000000000000 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/TestMockHelpers.java +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.azurebfs.utils; - -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; - -/** - * Test Mock Helpers. - */ -public final class TestMockHelpers { - - /** - * Sets a class field by reflection. 
- * @param type - * @param obj - * @param fieldName - * @param fieldObject - * @param <T> - * @return - * @throws Exception - */ - public static <T> T setClassField( - Class<T> type, - final T obj, - final String fieldName, - Object fieldObject) throws Exception { - - Field field = type.getDeclaredField(fieldName); - field.setAccessible(true); - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(field, - field.getModifiers() & ~Modifier.FINAL); - field.set(obj, fieldObject); - - return obj; - } - - private TestMockHelpers() { - // Not called. - For checkstyle: HideUtilityClassConstructor - } -} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java index 4119542164cff..def51fb1cc21a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java @@ -37,6 +37,7 @@ import org.apache.hadoop.metrics2.impl.MetricsCollectorImpl; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.test.GenericTestUtils; +import org.apache.hadoop.test.ReflectionUtils; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.metrics.GenericEventTypeMetrics; @@ -183,7 +184,7 @@ void testPrintDispatcherEventDetails() throws Exception { Field logger = AsyncDispatcher.class.getDeclaredField("LOG"); logger.setAccessible(true); - Field modifiers = Field.class.getDeclaredField("modifiers"); + Field modifiers = ReflectionUtils.getModifiersField(); modifiers.setAccessible(true); modifiers.setInt(logger, logger.getModifiers() &
~Modifier.FINAL); Object oldLog = logger.get(null); @@ -229,7 +230,7 @@ public void testPrintDispatcherEventDetailsAvoidDeadLoopInternal() Field logger = AsyncDispatcher.class.getDeclaredField("LOG"); logger.setAccessible(true); - Field modifiers = Field.class.getDeclaredField("modifiers"); + Field modifiers = ReflectionUtils.getModifiersField(); modifiers.setAccessible(true); modifiers.setInt(logger, logger.getModifiers() & ~Modifier.FINAL); Object oldLog = logger.get(null);