-(Service: AWSKMS; Status Code: 400; Error Code: AccessDeniedException;
- Request ID: 4ded9f1f-b245-4213-87fc-16cba7a1c4b9; Proxy: null)
-```
-
-The user trying to use the KMS Key ID should have the right permissions to access
-(encrypt/decrypt) using the AWS KMS Key used via `fs.s3a.encryption.key`.
-If not, then add permission(or IAM role) in "Key users" section by selecting the
-AWS-KMS CMK Key on AWS console.
-
-
### Message appears in logs "Not all bytes were read from the S3ObjectInputStream"
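For readers hitting the KMS AccessDeniedException quoted above, here is a minimal sketch of the client-side setup that exercises the key. The bucket name, key ARN, probe path, and the choice of SSE-KMS are illustrative assumptions, not values from this patch; the point from the troubleshooting note is only that the calling principal must be allowed to encrypt/decrypt with the CMK named by `fs.s3a.encryption.key` (for example by being listed under "Key users" on the key in the AWS console).

```java
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Minimal probe: fails fast if the caller cannot use the configured KMS key. */
public class KmsKeyPermissionProbe {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumption for this sketch: SSE-KMS as the encryption algorithm.
    conf.set("fs.s3a.encryption.algorithm", "SSE-KMS");
    // Placeholder ARN: replace with the CMK the bucket should use.
    conf.set("fs.s3a.encryption.key",
        "arn:aws:kms:us-east-1:111122223333:key/REPLACE-WITH-KEY-ID");

    // A single write forces the key to be used; a principal without
    // encrypt/decrypt permission on the CMK sees the error shown above.
    try (FileSystem fs = FileSystem.get(URI.create("s3a://example-bucket/"), conf);
         FSDataOutputStream out = fs.create(new Path("/kms-permission-probe"))) {
      out.writeUTF("probe");
    }
  }
}
```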
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
index 5069f949ea221..8a955a5869b2b 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
@@ -63,7 +63,7 @@
import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestPropertyBool;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.unsetAllEncryptionPropertiesForBucket;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.unsetAllEncryptionPropertiesForBaseAndBucket;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
/**
@@ -282,7 +282,7 @@ public void testUnencryptedObjectReadWithV1CompatibilityConfig() throws Exceptio
maybeSkipTest();
// initialize base s3 client.
Configuration conf = new Configuration(getConfiguration());
- unsetAllEncryptionPropertiesForBucket(conf);
+ unsetAllEncryptionPropertiesForBaseAndBucket(conf);
Path file = methodPath();
@@ -315,7 +315,6 @@ public void testUnencryptedObjectReadWithV1CompatibilityConfig() throws Exceptio
public void testSizeOfEncryptedObjectFromHeaderWithV1Compatibility() throws Exception {
maybeSkipTest();
Configuration cseConf = new Configuration(getConfiguration());
- unsetAllEncryptionPropertiesForBucket(cseConf);
cseConf.setBoolean(S3_ENCRYPTION_CSE_V1_COMPATIBILITY_ENABLED, true);
try (S3AFileSystem fs = createTestFileSystem(cseConf)) {
fs.initialize(getFileSystem().getUri(), cseConf);
@@ -354,7 +353,7 @@ public void testSizeOfEncryptedObjectFromHeaderWithV1Compatibility() throws Exce
public void testSizeOfUnencryptedObjectWithV1Compatibility() throws Exception {
maybeSkipTest();
Configuration conf = new Configuration(getConfiguration());
- unsetAllEncryptionPropertiesForBucket(conf);
+ unsetAllEncryptionPropertiesForBaseAndBucket(conf);
conf.setBoolean(S3_ENCRYPTION_CSE_V1_COMPATIBILITY_ENABLED, false);
Path file = methodPath();
try (S3AFileSystem fs = createTestFileSystem(conf)) {
@@ -386,7 +385,6 @@ public void testSizeOfUnencryptedObjectWithV1Compatibility() throws Exception {
public void testSizeOfEncryptedObjectWithV1Compatibility() throws Exception {
maybeSkipTest();
Configuration cseConf = new Configuration(getConfiguration());
- unsetAllEncryptionPropertiesForBucket(cseConf);
cseConf.setBoolean(S3_ENCRYPTION_CSE_V1_COMPATIBILITY_ENABLED, true);
try (S3AFileSystem fs = createTestFileSystem(cseConf)) {
fs.initialize(getFileSystem().getUri(), cseConf);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionCustom.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionCustom.java
index 8065ce1b3a759..9b54c06648c2f 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionCustom.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionCustom.java
@@ -31,7 +31,6 @@
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CSE_CUSTOM_KEYRING_CLASS_NAME;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.unsetAllEncryptionPropertiesForBucket;
/**
* Tests to verify Custom S3 client side encryption CSE-CUSTOM.
@@ -48,7 +47,6 @@ public class ITestS3AClientSideEncryptionCustom extends ITestS3AClientSideEncryp
protected Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
S3ATestUtils.disableFilesystemCaching(conf);
- unsetAllEncryptionPropertiesForBucket(conf);
conf.set(S3_ENCRYPTION_CSE_CUSTOM_KEYRING_CLASS_NAME,
CustomKeyring.class.getCanonicalName());
return conf;
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
index 66b25a054741e..308838c2927fa 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
@@ -644,7 +644,7 @@ public static void unsetEncryption(Configuration conf) {
}
/**
- * Removes all encryption-related properties for a specific S3 bucket from given configuration.
+ * Removes all encryption-related properties, both the base options and the test bucket overrides.
*
* This method unsets various encryption settings specific to the test bucket. It removes
* bucket-specific overrides for multiple encryption-related properties, including both
@@ -653,8 +653,8 @@ public static void unsetEncryption(Configuration conf) {
* @param conf The Configuration object from which to remove the encryption properties.
* This object will be modified by this method.
*/
- public static void unsetAllEncryptionPropertiesForBucket(Configuration conf) {
- removeBucketOverrides(getTestBucketName(conf),
+ public static void unsetAllEncryptionPropertiesForBaseAndBucket(Configuration conf) {
+ removeBaseAndBucketOverrides(getTestBucketName(conf),
conf,
S3_ENCRYPTION_ALGORITHM,
S3_ENCRYPTION_KEY,
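For context on the rename above, a small sketch (not from the patch) of what clearing both the base option and its bucket override means for a single property; `example-bucket` and the choice of `fs.s3a.encryption.key` are illustrative:

```java
// Given some Configuration conf and a test bucket named "example-bucket",
// removing base and bucket overrides unsets both flavours of each listed option:
conf.unset("fs.s3a.encryption.key");                        // base option
conf.unset("fs.s3a.bucket.example-bucket.encryption.key");  // per-bucket override
```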
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestUploadRecovery.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestUploadRecovery.java
index b16ad4f6235c3..2ede6d82798d0 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestUploadRecovery.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestUploadRecovery.java
@@ -39,6 +39,7 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.AWSClientIOException;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.commit.files.SinglePendingCommit;
import org.apache.hadoop.fs.s3a.commit.impl.CommitContext;
@@ -199,13 +200,28 @@ public void testMagicWriteRecovery() throws Throwable {
MAGIC_PATH_PREFIX + buffer + "/" + BASE + "/file.txt");
SdkFaultInjector.setEvaluator(SdkFaultInjector::isPartUpload);
- final FSDataOutputStream out = fs.create(path);
-
- // set the failure count again
- SdkFaultInjector.setRequestFailureCount(2);
-
- out.writeUTF("utfstring");
- out.close();
+ boolean isExceptionThrown = false;
+ try {
+ final FSDataOutputStream out = fs.create(path);
+
+ // set the failure count again
+ SdkFaultInjector.setRequestFailureCount(2);
+
+ out.writeUTF("utfstring");
+ out.close();
+ } catch (AWSClientIOException exception) {
+ if (!fs.isCSEEnabled()) {
+ throw exception;
+ }
+ isExceptionThrown = true;
+ }
+    // Retrying a multipart upload is not supported when CSE is enabled,
+    // so an exception is expected in that case.
+ if (fs.isCSEEnabled()) {
+ Assertions.assertThat(isExceptionThrown)
+ .describedAs("Exception should be thrown when CSE is enabled")
+ .isTrue();
+ }
}
/**
@@ -213,6 +229,7 @@ public void testMagicWriteRecovery() throws Throwable {
*/
@Test
public void testCommitOperations() throws Throwable {
+ skipIfClientSideEncryption();
Assumptions.assumeThat(includeCommitTest)
.describedAs("commit test excluded")
.isTrue();
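As a side note on the design of the new CSE branch in testMagicWriteRecovery above, the same expectation could also be written with Hadoop's LambdaTestUtils.intercept helper, which these encryption tests already import elsewhere. A sketch under that assumption, with a hypothetical helper name and the same fs, path and SdkFaultInjector setup as the test:

```java
// Sketch only, not part of the patch. Assumed imports, matching the ones these
// tests already use: org.apache.hadoop.fs.s3a.AWSClientIOException,
// org.apache.hadoop.fs.FSDataOutputStream, and a static import of
// org.apache.hadoop.test.LambdaTestUtils.intercept.
private void writeExpectingCseFailure(S3AFileSystem fs, Path path) throws Exception {
  if (fs.isCSEEnabled()) {
    // Retrying a failed part upload is not supported with CSE, so the write must fail.
    intercept(AWSClientIOException.class, () -> {
      try (FSDataOutputStream out = fs.create(path)) {
        SdkFaultInjector.setRequestFailureCount(2);
        out.writeUTF("utfstring");
      }
      return null;
    });
  } else {
    // Without CSE the write is expected to recover and succeed.
    try (FSDataOutputStream out = fs.create(path)) {
      SdkFaultInjector.setRequestFailureCount(2);
      out.writeUTF("utfstring");
    }
  }
}
```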
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java
index 1916a0472cc1a..ed7a32928b8bf 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java
@@ -90,6 +90,7 @@ public void testQuietLogging() throws Throwable {
*/
@Test
public void testNoisyLogging() throws Throwable {
+ skipIfClientSideEncryption();
try (S3AFileSystem newFs = newFileSystem()) {
noisyLogging();
String output = createAndLogTransferManager(newFs);