This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
commit 89a988b0a88871370b9ea8551194c64fb0d8a1ae
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Fri Mar 7 17:21:40 2025 +0000

    HADOOP-19485. S3A: Test failures after SDK upgrade

    Code changes related to HADOOP-19485.

    AwsSdkWorkarounds no longer needs to cut back on transfer manager
    logging (HADOOP-19272).
    - Remove the log downgrade and change the assertion to expect
      nothing to be logged.
    - Remove false positives from the log.

    ITestS3AEndpointRegion failure: a change in the state of the
    AwsExecutionAttribute.ENDPOINT_OVERRIDDEN attribute requires test
    tuning to match.

    Change-Id: Iee88e13c113daf852c82c4a11c5029a62efabb71
---
 .../hadoop/fs/s3a/impl/AwsSdkWorkarounds.java      | 16 ++++--
 .../hadoop/fs/s3a/ITestS3AEndpointRegion.java      | 66 ++++++++++++++++------
 .../hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java | 37 ++----------
 .../org/apache/hadoop/fs/sdk/TestAWSV2SDK.java     |  2 +-
 4 files changed, 64 insertions(+), 57 deletions(-)

diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AwsSdkWorkarounds.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AwsSdkWorkarounds.java
index a0673b123b2..fabb543bd90 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AwsSdkWorkarounds.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AwsSdkWorkarounds.java
@@ -18,9 +18,10 @@
 package org.apache.hadoop.fs.s3a.impl;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.fs.s3a.impl.logging.LogControl;
-import org.apache.hadoop.fs.s3a.impl.logging.LogControllerFactory;
 
 /**
  * This class exists to support workarounds for parts of the AWS SDK
@@ -35,16 +36,20 @@ public final class AwsSdkWorkarounds {
   public static final String TRANSFER_MANAGER =
       "software.amazon.awssdk.transfer.s3.S3TransferManager";
 
+  private static final Logger LOG = LoggerFactory.getLogger(AwsSdkWorkarounds.class);
+
   private AwsSdkWorkarounds() {
   }
 
   /**
    * Prepare logging before creating AWS clients.
+   * There is currently no logging to require tuning,
+   * so this only logs at trace that it was invoked.
    * @return true if the log tuning operation took place.
    */
  public static boolean prepareLogging() {
-    return LogControllerFactory.createController().
-        setLogLevel(TRANSFER_MANAGER, LogControl.LogLevel.ERROR);
+    LOG.trace("prepareLogging()");
+    return true;
  }
 
  /**
@@ -53,7 +58,6 @@ public static boolean prepareLogging() {
   */
  @VisibleForTesting
  static boolean restoreNoisyLogging() {
-    return LogControllerFactory.createController().
-        setLogLevel(TRANSFER_MANAGER, LogControl.LogLevel.INFO);
+    return true;
  }
 }
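The deleted LogControllerFactory call was the HADOOP-19272 workaround which forced the S3TransferManager logger down to ERROR; with the upgraded SDK nothing noisy is logged, so prepareLogging() becomes a no-op. As a minimal sketch, here is how the quiet behaviour can be asserted with Hadoop's GenericTestUtils.LogCapturer (the same helper ITestAwsSdkWorkarounds imports below); the class and method names here are illustrative, not part of the commit:

    import org.assertj.core.api.Assertions;
    import org.slf4j.LoggerFactory;

    import org.apache.hadoop.fs.s3a.impl.AwsSdkWorkarounds;
    import org.apache.hadoop.test.GenericTestUtils.LogCapturer;

    public class TransferManagerLogCheck {

      // Capture everything the transfer manager logger emits while the
      // action runs, then assert that nothing at all was logged.
      static void assertQuiet(Runnable action) {
        LogCapturer capturer = LogCapturer.captureLogs(
            LoggerFactory.getLogger(AwsSdkWorkarounds.TRANSFER_MANAGER));
        try {
          action.run();  // e.g. create an S3A filesystem and its transfer manager
          Assertions.assertThat(capturer.getOutput())
              .describedAs("LOG output")
              .isEmpty();
        } finally {
          capturer.stopCapturing();
        }
      }
    }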
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java
index 07caeb02f41..44d5a4a606c 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java
@@ -24,9 +24,9 @@
 import java.nio.file.AccessDeniedException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicReference;
 
 import org.assertj.core.api.Assertions;
-import org.junit.Ignore;
 import org.junit.Test;
 import software.amazon.awssdk.awscore.AwsExecutionAttribute;
 import software.amazon.awssdk.awscore.exception.AwsServiceException;
@@ -106,6 +106,10 @@ public class ITestS3AEndpointRegion extends AbstractS3ATestBase {
   public static final String EXCEPTION_THROWN_BY_INTERCEPTOR = "Exception thrown by interceptor";
 
+  /**
+   * Text to include in assertions.
+   */
+  private static final AtomicReference<String> EXPECTED_MESSAGE = new AtomicReference<>();
   /**
    * New FS instance which will be closed in teardown.
    */
@@ -576,7 +580,7 @@ private void assertOpsUsingNewFs() throws IOException {
         .isFalse();
   }
 
-  private final class RegionInterceptor implements ExecutionInterceptor {
+  private static final class RegionInterceptor implements ExecutionInterceptor {
     private final String endpoint;
     private final String region;
     private final boolean isFips;
@@ -591,28 +595,49 @@ private final class RegionInterceptor implements ExecutionInterceptor {
     public void beforeExecution(Context.BeforeExecution context,
         ExecutionAttributes executionAttributes) {
 
-      if (endpoint != null && !endpoint.endsWith(CENTRAL_ENDPOINT)) {
-        Assertions.assertThat(
-                executionAttributes.getAttribute(AwsExecutionAttribute.ENDPOINT_OVERRIDDEN))
-            .describedAs("Endpoint not overridden").isTrue();
-        Assertions.assertThat(
-                executionAttributes.getAttribute(AwsExecutionAttribute.CLIENT_ENDPOINT).toString())
-            .describedAs("There is an endpoint mismatch").isEqualTo("https://" + endpoint);
+      // extract state from the execution attributes.
+      final Boolean endpointOveridden =
+          executionAttributes.getAttribute(AwsExecutionAttribute.ENDPOINT_OVERRIDDEN);
+      final String clientEndpoint =
+          executionAttributes.getAttribute(AwsExecutionAttribute.CLIENT_ENDPOINT).toString();
+      final Boolean fipsEnabled = executionAttributes.getAttribute(
+          AwsExecutionAttribute.FIPS_ENDPOINT_ENABLED);
+      final String reg = executionAttributes.getAttribute(AwsExecutionAttribute.AWS_REGION).
+          toString();
+
+      String state = "SDK beforeExecution callback; "
+          + "endpointOveridden=" + endpointOveridden
+          + "; clientEndpoint=" + clientEndpoint
+          + "; fipsEnabled=" + fipsEnabled
+          + "; region=" + reg;
+
+      if (endpoint != null && !endpoint.endsWith(CENTRAL_ENDPOINT)) {
+        Assertions.assertThat(endpointOveridden)
+            .describedAs("Endpoint not overridden in %s. Client Config=%s",
+                state, EXPECTED_MESSAGE.get())
+            .isTrue();
+
+        Assertions.assertThat(clientEndpoint)
+            .describedAs("There is an endpoint mismatch in %s. Client Config=%s",
+                state, EXPECTED_MESSAGE.get())
+            .isEqualTo("https://" + endpoint);
       } else {
-        Assertions.assertThat(
-                executionAttributes.getAttribute(AwsExecutionAttribute.ENDPOINT_OVERRIDDEN))
-            .describedAs("Endpoint is overridden").isEqualTo(null);
+        Assertions.assertThat(endpointOveridden)
+            .describedAs("Attribute endpointOveridden is null in %s. Client Config=%s",
+                state, EXPECTED_MESSAGE.get())
+            .isEqualTo(false);
       }
 
-      Assertions.assertThat(
-              executionAttributes.getAttribute(AwsExecutionAttribute.AWS_REGION).toString())
-          .describedAs("Incorrect region set").isEqualTo(region);
+      Assertions.assertThat(reg)
+          .describedAs("Incorrect region set in %s. Client Config=%s",
+              state, EXPECTED_MESSAGE.get())
+          .isEqualTo(region);
 
       // verify the fips state matches expectation.
-      Assertions.assertThat(executionAttributes.getAttribute(
-              AwsExecutionAttribute.FIPS_ENDPOINT_ENABLED))
-          .describedAs("Incorrect FIPS flag set in execution attributes")
+      Assertions.assertThat(fipsEnabled)
+          .describedAs("Incorrect FIPS flag set in %s; Client Config=%s",
+              state, EXPECTED_MESSAGE.get())
           .isNotNull()
           .isEqualTo(isFips);
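The rewritten assertions above track the SDK behaviour change named in the commit message: after the upgrade, AwsExecutionAttribute.ENDPOINT_OVERRIDDEN resolves to Boolean.FALSE rather than null when no endpoint override is configured, hence .isEqualTo(null) becoming .isEqualTo(false). For readers unfamiliar with execution interceptors, a standalone sketch which reads the same attributes; the class name EndpointStateLogger is hypothetical, while the attribute lookups mirror those in the hunk above:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import software.amazon.awssdk.awscore.AwsExecutionAttribute;
    import software.amazon.awssdk.core.interceptor.Context;
    import software.amazon.awssdk.core.interceptor.ExecutionAttributes;
    import software.amazon.awssdk.core.interceptor.ExecutionInterceptor;

    /**
     * Logs the endpoint/region state the SDK resolved for each request.
     * Register it through ClientOverrideConfiguration when building a client.
     */
    public class EndpointStateLogger implements ExecutionInterceptor {

      private static final Logger LOG =
          LoggerFactory.getLogger(EndpointStateLogger.class);

      @Override
      public void beforeExecution(Context.BeforeExecution context,
          ExecutionAttributes executionAttributes) {
        // After the SDK upgrade this attribute is Boolean.FALSE, not null,
        // when no endpoint override was configured.
        Boolean overridden = executionAttributes.getAttribute(
            AwsExecutionAttribute.ENDPOINT_OVERRIDDEN);
        LOG.info("endpoint overridden={}; endpoint={}; region={}; fips={}",
            overridden,
            executionAttributes.getAttribute(AwsExecutionAttribute.CLIENT_ENDPOINT),
            executionAttributes.getAttribute(AwsExecutionAttribute.AWS_REGION),
            executionAttributes.getAttribute(AwsExecutionAttribute.FIPS_ENDPOINT_ENABLED));
      }
    }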
Client Config=%s", + state, EXPECTED_MESSAGE.get()) + .isEqualTo("https://" + endpoint); } else { - Assertions.assertThat( - executionAttributes.getAttribute(AwsExecutionAttribute.ENDPOINT_OVERRIDDEN)) - .describedAs("Endpoint is overridden").isEqualTo(null); + Assertions.assertThat(endpointOveridden) + .describedAs("Attribute endpointOveridden is null in %s. Client Config=%s", + state, EXPECTED_MESSAGE.get()) + .isEqualTo(false); } - Assertions.assertThat( - executionAttributes.getAttribute(AwsExecutionAttribute.AWS_REGION).toString()) - .describedAs("Incorrect region set").isEqualTo(region); + Assertions.assertThat(reg) + .describedAs("Incorrect region set in %s. Client Config=%s", + state, EXPECTED_MESSAGE.get()) + .isEqualTo(region); // verify the fips state matches expectation. - Assertions.assertThat(executionAttributes.getAttribute( - AwsExecutionAttribute.FIPS_ENDPOINT_ENABLED)) - .describedAs("Incorrect FIPS flag set in execution attributes") + Assertions.assertThat(fipsEnabled) + .describedAs("Incorrect FIPS flag set in %s; Client Config=%s", + state, EXPECTED_MESSAGE.get()) .isNotNull() .isEqualTo(isFips); @@ -637,6 +662,11 @@ private S3Client createS3Client(Configuration conf, String endpoint, String configuredRegion, String expectedRegion, boolean isFips) throws IOException { + String expected = + "endpoint=" + endpoint + "; region=" + configuredRegion + + "; expectedRegion=" + expectedRegion + "; isFips=" + isFips; + LOG.info("Creating S3 client with {}", expected); + EXPECTED_MESSAGE.set(expected); List<ExecutionInterceptor> interceptors = new ArrayList<>(); interceptors.add(new RegionInterceptor(endpoint, expectedRegion, isFips)); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java index ed7a32928b8..d18a722a0e2 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java @@ -32,12 +32,9 @@ import static org.apache.hadoop.test.GenericTestUtils.LogCapturer.captureLogs; /** - * Verify that noisy transfer manager logs are turned off. + * Tests for any AWS SDK workaround code. * <p> - * This is done by creating new FS instances and then - * requesting an on-demand transfer manager from the store. - * As this is only done once per FS instance, a new FS is - * required per test case. + * These tests are inevitably brittle against SDK updates. */ public class ITestAwsSdkWorkarounds extends AbstractS3ATestBase { @@ -53,13 +50,6 @@ public class ITestAwsSdkWorkarounds extends AbstractS3ATestBase { private static final Logger XFER_LOG = LoggerFactory.getLogger(AwsSdkWorkarounds.TRANSFER_MANAGER); - /** - * This is the string which keeps being printed. - * {@value}. - */ - private static final String FORBIDDEN = - "The provided S3AsyncClient is an instance of MultipartS3AsyncClient"; - /** * Marginal test run speedup by skipping needless test dir cleanup. * @throws IOException failure @@ -70,23 +60,7 @@ protected void deleteTestDirInTeardown() throws IOException { } /** - * Test instantiation with logging disabled. 
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java
index ed7a32928b8..d18a722a0e2 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestAwsSdkWorkarounds.java
@@ -32,12 +32,9 @@ import static org.apache.hadoop.test.GenericTestUtils.LogCapturer.captureLogs;
 
 /**
- * Verify that noisy transfer manager logs are turned off.
+ * Tests for any AWS SDK workaround code.
  * <p>
- * This is done by creating new FS instances and then
- * requesting an on-demand transfer manager from the store.
- * As this is only done once per FS instance, a new FS is
- * required per test case.
+ * These tests are inevitably brittle against SDK updates.
  */
 public class ITestAwsSdkWorkarounds extends AbstractS3ATestBase {
 
@@ -53,13 +50,6 @@ public class ITestAwsSdkWorkarounds extends AbstractS3ATestBase {
   private static final Logger XFER_LOG =
       LoggerFactory.getLogger(AwsSdkWorkarounds.TRANSFER_MANAGER);
 
-  /**
-   * This is the string which keeps being printed.
-   * {@value}.
-   */
-  private static final String FORBIDDEN =
-      "The provided S3AsyncClient is an instance of MultipartS3AsyncClient";
-
   /**
    * Marginal test run speedup by skipping needless test dir cleanup.
    * @throws IOException failure
@@ -70,23 +60,7 @@ protected void deleteTestDirInTeardown() throws IOException {
   }
 
   /**
-   * Test instantiation with logging disabled.
-   */
-  @Test
-  public void testQuietLogging() throws Throwable {
-    // simulate the base state of logging
-    noisyLogging();
-    // creating a new FS switches to quiet logging
-    try (S3AFileSystem newFs = newFileSystem()) {
-      String output = createAndLogTransferManager(newFs);
-      Assertions.assertThat(output)
-          .describedAs("LOG output")
-          .doesNotContain(FORBIDDEN);
-    }
-  }
-
-  /**
-   * Test instantiation with logging disabled.
+   * Test instantiation with logging enabled.
    */
   @Test
   public void testNoisyLogging() throws Throwable {
@@ -95,9 +69,8 @@ public void testNoisyLogging() throws Throwable {
       noisyLogging();
       String output = createAndLogTransferManager(newFs);
       Assertions.assertThat(output)
-          .describedAs("LOG output does not contain the forbidden text."
-              + " Has the SDK been fixed?")
-          .contains(FORBIDDEN);
+          .describedAs("LOG output")
+          .isEmpty();
     }
   }
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/sdk/TestAWSV2SDK.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/sdk/TestAWSV2SDK.java
index fca9fcc300c..1ddd3377cf0 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/sdk/TestAWSV2SDK.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/sdk/TestAWSV2SDK.java
@@ -57,7 +57,7 @@ public void testShadedClasses() throws IOException {
     assertThat(v2ClassPath)
         .as("AWS V2 SDK should be present on the classpath").isNotNull();
     List<String> listOfV2SdkClasses = getClassNamesFromJarFile(v2ClassPath);
-    String awsSdkPrefix = "software/amazon/awssdk";
+    String awsSdkPrefix = "software/amazon/";
     List<String> unshadedClasses = new ArrayList<>();
     for (String awsSdkClass : listOfV2SdkClasses) {
       if (!awsSdkClass.startsWith(awsSdkPrefix)) {
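On the TestAWSV2SDK change: the test flags any class in the SDK bundle jar whose path does not start with the prefix, so widening the prefix from software/amazon/awssdk to software/amazon/ stops legitimate SDK packages outside awssdk (software/amazon/eventstream, for example) showing up as unshaded false positives. A hypothetical standalone version of the same scan, using only the JDK's jar APIs; the class and method names are illustrative:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Enumeration;
    import java.util.List;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    public final class ShadingCheck {

      // List every class in the bundle jar which lives outside the
      // software/amazon/ tree; anything returned is an unshaded class.
      public static List<String> unshadedClasses(String jarPath) throws IOException {
        String awsSdkPrefix = "software/amazon/";  // widened from software/amazon/awssdk
        List<String> unshaded = new ArrayList<>();
        try (JarFile jar = new JarFile(jarPath)) {
          Enumeration<JarEntry> entries = jar.entries();
          while (entries.hasMoreElements()) {
            String name = entries.nextElement().getName();
            if (name.endsWith(".class") && !name.startsWith(awsSdkPrefix)) {
              unshaded.add(name);
            }
          }
        }
        return unshaded;
      }
    }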