Skip to content

Commit

Permalink
HADOOP-19367. Fix setting final field value on Java 17 (#7228)
Browse files Browse the repository at this point in the history
  • Loading branch information
pan3793 authored Dec 19, 2024
1 parent 7f49190 commit 6cb2e86
Show file tree
Hide file tree
Showing 9 changed files with 60 additions and 142 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
package org.apache.hadoop.test;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

public final class ReflectionUtils {
private ReflectionUtils() {}
Expand Down Expand Up @@ -48,4 +50,27 @@ public static String getStringValueOfField(Field f) throws IllegalAccessExceptio
return null;
}
}

/**
 * Forcibly writes {@code value} into the field named {@code fieldName}
 * declared on {@code type}, even when that field is {@code final}.
 *
 * The FINAL modifier bit is stripped from the {@link Field}'s modifiers
 * before the first write, so the field accessor is created without the
 * final-field restriction.
 *
 * @param type      the class declaring the field
 * @param obj       the instance to mutate ({@code null} for a static field,
 *                  per the {@link Field#set} contract)
 * @param fieldName the declared field's name
 * @param value     the value to store
 * @throws ReflectiveOperationException if the field cannot be located or written
 */
public static <T> void setFinalField(
    Class<T> type, final T obj, final String fieldName, Object value)
    throws ReflectiveOperationException {
  Field target = type.getDeclaredField(fieldName);
  target.setAccessible(true);
  // Clear the FINAL bit so the subsequent set() is permitted.
  Field modifiers = getModifiersField();
  modifiers.setAccessible(true);
  int stripped = target.getModifiers() & ~Modifier.FINAL;
  modifiers.setInt(target, stripped);
  target.set(obj, value);
}

/**
 * Returns the {@link Field} object for the private {@code modifiers} field
 * of {@link Field} itself.
 *
 * Since Java 12, {@code Field.class.getDeclaredField("modifiers")} is
 * filtered out by core reflection, so this lookup goes through the private
 * {@code Class#getDeclaredFields0} method, which bypasses the filter.
 * NOTE(review): on Java 17+ this additionally requires the JVM to be
 * started with {@code --add-opens java.base/java.lang=ALL-UNNAMED};
 * confirm the test JVM arguments include it.
 *
 * @return the reflective handle for {@code Field.modifiers}
 * @throws ReflectiveOperationException if the internal lookup fails
 * @throws UnsupportedOperationException if no {@code modifiers} field exists
 *         on this JVM
 */
public static Field getModifiersField() throws ReflectiveOperationException {
  Method getDeclaredFields0 = Class.class.getDeclaredMethod("getDeclaredFields0", boolean.class);
  getDeclaredFields0.setAccessible(true);
  Field[] fields = (Field[]) getDeclaredFields0.invoke(Field.class, false);
  for (Field each : fields) {
    if ("modifiers".equals(each.getName())) {
      return each;
    }
  }
  // Fail with a diagnosable message rather than a bare exception.
  throw new UnsupportedOperationException(
      "Field 'modifiers' not found among declared fields of java.lang.reflect.Field");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,6 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.UnknownHostException;
Expand Down Expand Up @@ -90,6 +88,7 @@
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.ReflectionUtils;
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.junit.Test;
Expand Down Expand Up @@ -715,7 +714,7 @@ public void testFileCreationError3() throws IOException {
*/
@Test
public void testFileCreationNamenodeRestart()
throws IOException, NoSuchFieldException, IllegalAccessException {
throws IOException, ReflectiveOperationException {
Configuration conf = new HdfsConfiguration();
final int MAX_IDLE_TIME = 2000; // 2s
conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
Expand Down Expand Up @@ -812,20 +811,13 @@ public void testFileCreationNamenodeRestart()

// instruct the dfsclient to use a new filename when it requests
// new blocks for files that were renamed.
DFSOutputStream dfstream = (DFSOutputStream)
(stm.getWrappedStream());
DFSOutputStream dfstream = (DFSOutputStream) (stm.getWrappedStream());

Field f = DFSOutputStream.class.getDeclaredField("src");
Field modifiersField = Field.class.getDeclaredField("modifiers");
modifiersField.setAccessible(true);
modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL);
f.setAccessible(true);

f.set(dfstream, file1.toString());
ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file1.toString());
dfstream = (DFSOutputStream) (stm3.getWrappedStream());
f.set(dfstream, file3new.toString());
ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file3new.toString());
dfstream = (DFSOutputStream) (stm4.getWrappedStream());
f.set(dfstream, file4new.toString());
ReflectionUtils.setFinalField(DFSOutputStream.class, dfstream, "src", file4new.toString());

// write 1 byte to file. This should succeed because the
// namenode should have persisted leases.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@
import static org.junit.Assert.assertNotEquals;

import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
Expand All @@ -34,6 +32,7 @@
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.test.ReflectionUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
Expand Down Expand Up @@ -77,22 +76,15 @@ public static void clusterShutdown() throws IOException {
}
}

private void setFSNameSystemFinalField(String field, boolean value)
throws NoSuchFieldException, IllegalAccessException {
Field f = FSNamesystem.class.getDeclaredField(field);
f.setAccessible(true);
Field modifiersField = Field.class.getDeclaredField("modifiers");
modifiersField.setAccessible(true);
modifiersField.setInt(f, f.getModifiers() & ~Modifier.FINAL);
f.set(cluster.getNamesystem(), value);
}

private void setStoragePolicyPermissions(boolean isStoragePolicyEnabled,
boolean isStoragePolicySuperuserOnly)
throws NoSuchFieldException, IllegalAccessException {
setFSNameSystemFinalField("isStoragePolicyEnabled", isStoragePolicyEnabled);
setFSNameSystemFinalField("isStoragePolicySuperuserOnly",
isStoragePolicySuperuserOnly);
throws ReflectiveOperationException {
ReflectionUtils.setFinalField(
FSNamesystem.class, cluster.getNamesystem(),
"isStoragePolicyEnabled", isStoragePolicyEnabled);
ReflectionUtils.setFinalField(
FSNamesystem.class, cluster.getNamesystem(),
"isStoragePolicySuperuserOnly", isStoragePolicySuperuserOnly);
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import java.io.FileNotFoundException;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.EnumSet;
import java.util.UUID;

Expand All @@ -40,6 +39,7 @@
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.ReflectionUtils;

import org.apache.hadoop.fs.azurebfs.constants.FSOperationType;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsRestOperationException;
Expand Down Expand Up @@ -395,7 +395,9 @@ public void testNegativeScenariosForCreateOverwriteDisabled()
fs.getAbfsStore().getAbfsConfiguration());

AzureBlobFileSystemStore abfsStore = fs.getAbfsStore();
abfsStore = setAzureBlobSystemStoreField(abfsStore, "client", mockClient);

ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, abfsStore, "client", mockClient);

boolean isNamespaceEnabled = abfsStore
.getIsNamespaceEnabled(getTestTracingContext(fs, false));

Expand Down Expand Up @@ -486,22 +488,6 @@ public void testNegativeScenariosForCreateOverwriteDisabled()
validateCreateFileException(AbfsRestOperationException.class, abfsStore);
}

/**
 * Reflectively overwrites the named (final) field of the given store
 * instance and returns the same instance for chaining.
 * NOTE(review): relies on {@code Field.class.getDeclaredField("modifiers")},
 * which core reflection filters out on Java 12+ — presumably why the commit
 * replaces this helper; verify against the target JDK.
 */
private AzureBlobFileSystemStore setAzureBlobSystemStoreField(
    final AzureBlobFileSystemStore abfsStore,
    final String fieldName,
    Object fieldObject) throws Exception {

  Field target = AzureBlobFileSystemStore.class.getDeclaredField(fieldName);
  target.setAccessible(true);
  // Strip the FINAL modifier bit so the write below is permitted.
  Field modifiers = Field.class.getDeclaredField("modifiers");
  modifiers.setAccessible(true);
  int stripped = target.getModifiers() & ~java.lang.reflect.Modifier.FINAL;
  modifiers.setInt(target, stripped);
  target.set(abfsStore, fieldObject);
  return abfsStore;
}

private <E extends Throwable> void validateCreateFileException(final Class<E> exceptionClass, final AzureBlobFileSystemStore abfsStore)
throws Exception {
FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,12 @@
import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation;
import org.apache.hadoop.fs.azurebfs.services.ITestAbfsClient;
import org.apache.hadoop.fs.azurebfs.services.TestAbfsPerfTracker;
import org.apache.hadoop.fs.azurebfs.utils.TestMockHelpers;
import org.apache.hadoop.fs.azurebfs.utils.TracingContext;
import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderValidator;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.test.ReflectionUtils;

import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
Expand Down Expand Up @@ -250,9 +250,9 @@ public void testDeleteIdempotencyTriggerHttp404() throws Exception {
fs.getAbfsStore().getClient(),
this.getConfiguration());
AzureBlobFileSystemStore mockStore = mock(AzureBlobFileSystemStore.class);
mockStore = TestMockHelpers.setClassField(AzureBlobFileSystemStore.class, mockStore,
ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, mockStore,
"client", mockClient);
mockStore = TestMockHelpers.setClassField(AzureBlobFileSystemStore.class,
ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class,
mockStore,
"abfsPerfTracker",
TestAbfsPerfTracker.getAPerfTrackerInstance(this.getConfiguration()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
package org.apache.hadoop.fs.azurebfs.services;

import java.io.IOException;
import java.lang.reflect.Field;
import java.net.ProtocolException;
import java.net.URI;
import java.net.URISyntaxException;
Expand All @@ -29,17 +28,18 @@
import java.util.Random;
import java.util.regex.Pattern;

import org.apache.hadoop.fs.azurebfs.AbfsCountersImpl;
import org.assertj.core.api.Assertions;
import org.junit.Assume;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.Mockito;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
import org.apache.hadoop.fs.azurebfs.AbfsCountersImpl;
import org.apache.hadoop.fs.azurebfs.AbstractAbfsIntegrationTest;
import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem;
import org.apache.hadoop.fs.azurebfs.TestAbfsConfigurationFieldsValidation;
Expand All @@ -49,11 +49,11 @@
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AzureBlobFileSystemException;
import org.apache.hadoop.fs.azurebfs.contracts.services.AppendRequestParameters;
import org.apache.hadoop.fs.azurebfs.oauth2.AccessTokenProvider;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
import org.apache.hadoop.fs.azurebfs.utils.TracingContext;
import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderFormat;
import org.apache.hadoop.security.ssl.DelegatingSSLSocketFactory;
import org.apache.hadoop.test.ReflectionUtils;
import org.apache.http.HttpResponse;

import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
Expand Down Expand Up @@ -423,50 +423,30 @@ public static AbfsClient getMockAbfsClient(AbfsClient baseAbfsClientInstance,
Mockito.doReturn(baseAbfsClientInstance.getAbfsApacheHttpClient()).when(client).getAbfsApacheHttpClient();

// override baseurl
client = ITestAbfsClient.setAbfsClientField(client, "abfsConfiguration",
abfsConfig);
ReflectionUtils.setFinalField(AbfsClient.class, client, "abfsConfiguration", abfsConfig);

// override baseurl
client = ITestAbfsClient.setAbfsClientField(client, "baseUrl",
baseAbfsClientInstance.getBaseUrl());
ReflectionUtils.setFinalField(AbfsClient.class, client, "baseUrl", baseAbfsClientInstance.getBaseUrl());

// override xMsVersion
client = ITestAbfsClient.setAbfsClientField(client, "xMsVersion",
baseAbfsClientInstance.getxMsVersion());
ReflectionUtils.setFinalField(AbfsClient.class, client, "xMsVersion", baseAbfsClientInstance.getxMsVersion());

// override auth provider
if (currentAuthType == AuthType.SharedKey) {
client = ITestAbfsClient.setAbfsClientField(client, "sharedKeyCredentials",
new SharedKeyCredentials(
ReflectionUtils.setFinalField(AbfsClient.class, client, "sharedKeyCredentials", new SharedKeyCredentials(
abfsConfig.getAccountName().substring(0,
abfsConfig.getAccountName().indexOf(DOT)),
abfsConfig.getStorageAccountKey()));
} else {
client = ITestAbfsClient.setAbfsClientField(client, "tokenProvider",
abfsConfig.getTokenProvider());
ReflectionUtils.setFinalField(AbfsClient.class, client, "tokenProvider", abfsConfig.getTokenProvider());
}

// override user agent
String userAgent = "APN/1.0 Azure Blob FS/3.5.0-SNAPSHOT (PrivateBuild "
+ "JavaJRE 1.8.0_252; Linux 5.3.0-59-generic/amd64; openssl-1.0; "
+ "UNKNOWN/UNKNOWN) MSFT";
client = ITestAbfsClient.setAbfsClientField(client, "userAgent", userAgent);

return client;
}
ReflectionUtils.setFinalField(AbfsClient.class, client, "userAgent", userAgent);

/**
 * Reflectively replaces the value of a (final) {@code AbfsClient} field and
 * hands back the same client instance.
 * NOTE(review): depends on {@code Field.class.getDeclaredField("modifiers")},
 * which is filtered by reflection on Java 12+ — verify against the target JDK.
 */
static AbfsClient setAbfsClientField(
    final AbfsClient client,
    final String fieldName,
    Object fieldObject) throws Exception {

  Field target = AbfsClient.class.getDeclaredField(fieldName);
  target.setAccessible(true);
  // Drop the FINAL bit from the field's modifiers before writing.
  Field modifiers = Field.class.getDeclaredField("modifiers");
  modifiers.setAccessible(true);
  modifiers.setInt(target,
      target.getModifiers() & ~java.lang.reflect.Modifier.FINAL);
  target.set(client, fieldObject);
  return client;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.test.ReflectionUtils;
import org.apache.hadoop.util.Lists;

import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
Expand Down Expand Up @@ -194,7 +195,7 @@ private void testRecursiveDeleteWithPaginationInternal(boolean isEmptyDir,

// Set the paginated enabled value and xMsVersion at spiedClient level.
AbfsClient spiedClient = Mockito.spy(fs.getAbfsStore().getClient());
ITestAbfsClient.setAbfsClientField(spiedClient, "xMsVersion", xMsVersion);
ReflectionUtils.setFinalField(AbfsClient.class, spiedClient, "xMsVersion", xMsVersion);
Mockito.doReturn(isPaginatedDeleteEnabled).when(spiedClient).getIsPaginatedDeleteEnabled();

AbfsRestOperation op = spiedClient.deletePath(
Expand Down

This file was deleted.

Loading

0 comments on commit 6cb2e86

Please sign in to comment.