This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
commit 08ef3c477a7c49849a2524b4089f53c31b742a53
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Wed Apr 2 19:42:04 2025 +0100

    Revert "HADOOP-19492. S3A: Some tests failing on third-party stores"

    This reverts commit 2dd658252bd2ec9831c0984823e173fca11f9051.
---
 .../site/markdown/tools/hadoop-aws/third_party_stores.md  | 15 ++-------------
 .../site/markdown/tools/hadoop-aws/troubleshooting_s3a.md |  2 +-
 .../org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java   |  2 --
 .../org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java  |  2 --
 .../org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java  |  3 ---
 .../test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java  |  2 +-
 .../org/apache/hadoop/fs/s3a/tools/ITestBucketTool.java   |  1 -
 7 files changed, 4 insertions(+), 23 deletions(-)

diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/third_party_stores.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/third_party_stores.md
index feffdf0a8b0..1018ec9e7d6 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/third_party_stores.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/third_party_stores.md
@@ -40,7 +40,6 @@ The features which may be unavailable include:
   This is now the default -do not change it.
 * List API to use (`fs.s3a.list.version = 1`)
 * Bucket lifecycle rules to clean up pending uploads.
-* Support for multipart uploads.
 
 ### Disabling Change Detection
 
@@ -410,7 +409,7 @@ which is a subset of the AWS API.
 To get a compatible access and secret key, follow the instructions of
 [Simple migration from Amazon S3 to Cloud Storage](https://cloud.google.com/storage/docs/aws-simple-migration#defaultproj).
 
-Here are the per-bucket settings for an example bucket "gcs-container"
+Here are the per-bucket setings for an example bucket "gcs-container"
 in Google Cloud Storage. Note the multiobject delete option must be disabled;
 this makes renaming and deleting significantly slower.
 
@@ -453,21 +452,11 @@ this makes renaming and deleting significantly slower.
     <value>true</value>
   </property>
 
-  <!-- any value is allowed here, using "gcs" is more informative -->
   <property>
     <name>fs.s3a.bucket.gcs-container.endpoint.region</name>
-    <value>gcs</value>
+    <value>dummy</value>
   </property>
 
-  <!-- multipart uploads trigger 400 response-->
-  <property>
-    <name>fs.s3a.multipart.uploads.enabled</name>
-    <value>false</value>
-  </property>
 
-  <property>
-    <name>fs.s3a.optimized.copy.from.local.enabled</name>
-    <value>false</value>
-  </property>
 </configuration>
 ```

diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md
index 54fb5f87c47..0638c2e4681 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md
@@ -1392,7 +1392,7 @@ software.amazon.awssdk.metrics.LoggingMetricPublisher
 ```
 
 ```text
-INFO metrics.LoggingMetricPublisher (LoggerAdapter.java:info(165)) - Metrics published:
+INFO metrics.LoggingMetricPublisher (LoggerAdapter.java:info(165)) - Metrics published: 
 MetricCollection(name=ApiCall, metrics=[
 MetricRecord(metric=MarshallingDuration, value=PT0.000092041S),
 MetricRecord(metric=RetryCount, value=0),

diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
index e6dabc91bac..97af80e70a5 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
@@ -589,8 +589,6 @@ public void testS3SpecificSignerOverride() throws Exception {
     config.set(AWS_REGION, EU_WEST_1);
     disableFilesystemCaching(config);
     fs = S3ATestUtils.createTestFileSystem(config);
-    assumeStoreAwsHosted(fs);
-
     S3Client s3Client = getS3Client("testS3SpecificSignerOverride");
 

diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
index 0f79881466f..8671d962175 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
@@ -39,7 +39,6 @@
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.assumeStoreAwsHosted;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.maybeSkipRootTests;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
@@ -102,7 +101,6 @@ public void setup() throws Exception {
     // although not a root dir test, this confuses paths enough it shouldn't be run in
     // parallel with other jobs
     maybeSkipRootTests(getConfiguration());
-    assumeStoreAwsHosted(getFileSystem());
   }
 
   @Override

diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java
index 62f2ffbc0df..44d5a4a606c 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEndpointRegion.java
@@ -55,7 +55,6 @@
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
 import static org.apache.hadoop.fs.s3a.DefaultS3ClientFactory.ERROR_ENDPOINT_WITH_FIPS;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.assume;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.assumeStoreAwsHosted;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
 import static org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils.DEFAULT_REQUESTER_PAYS_BUCKET_NAME;
 import static org.apache.hadoop.io.IOUtils.closeStream;
@@ -482,7 +481,6 @@ public void testCentralEndpointAndNullRegionWithCRUD() throws Throwable {
     describe("Access the test bucket using central endpoint and"
         + " null region, perform file system CRUD operations");
     final Configuration conf = getConfiguration();
-    assumeStoreAwsHosted(getFileSystem());
 
     final Configuration newConf = new Configuration(conf);
 
@@ -505,7 +503,6 @@ public void testCentralEndpointAndNullRegionWithCRUD() throws Throwable {
   public void testCentralEndpointAndNullRegionFipsWithCRUD() throws Throwable {
     describe("Access the test bucket using central endpoint and"
         + " null region and fips enabled, perform file system CRUD operations");
-    assumeStoreAwsHosted(getFileSystem());
 
     final String bucketLocation = getFileSystem().getBucketLocation();
     assume("FIPS can be enabled to access buckets from US or Canada endpoints only",

diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
index 53e4a68cbb6..132401ce8ff 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
@@ -1162,7 +1162,7 @@ public static void assumeNotS3ExpressFileSystem(final FileSystem fs) {
    */
   public static void assumeStoreAwsHosted(final FileSystem fs) {
     assume("store is not AWS S3",
-        NetworkBinding.isAwsEndpoint(fs.getConf()
+        !NetworkBinding.isAwsEndpoint(fs.getConf()
             .getTrimmed(ENDPOINT, DEFAULT_ENDPOINT)));
   }
 

diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/tools/ITestBucketTool.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/tools/ITestBucketTool.java
index b37e6eec7c8..50ffce7d87a 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/tools/ITestBucketTool.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/tools/ITestBucketTool.java
@@ -157,7 +157,6 @@ public void testSimpleBucketWithZoneParam() throws Throwable {
 
   @Test
   public void testS3ExpressBucketWithoutZoneParam() throws Throwable {
-    assumeStoreAwsHosted(getFileSystem());
     expectErrorCode(EXIT_USAGE,
         intercept(ExitUtil.ExitException.class, NO_ZONE_SUPPLIED,
             () -> bucketTool.exec("bucket", d(CREATE),
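The third_party_stores.md hunk above leans on S3A's per-bucket configuration mechanism: an option named `fs.s3a.bucket.BUCKET.OPTION` overrides the generic `fs.s3a.OPTION` for that one bucket, which is why the GCS settings can sit alongside AWS defaults in the same file. The sketch below illustrates only that remapping; it is not the S3A implementation (the real logic lives in `S3AUtils.propagateBucketOptions()`), and `bindToBucket` plus the demo values are invented for this example.

```java
import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public final class PerBucketOptionsSketch {

  /** Copy fs.s3a.bucket.<bucket>.* options over their generic fs.s3a.* names. */
  public static Configuration bindToBucket(Configuration base, String bucket) {
    final String prefix = "fs.s3a.bucket." + bucket + ".";
    final Configuration bound = new Configuration(base);
    for (Map.Entry<String, String> entry : base) {
      final String key = entry.getKey();
      if (key.startsWith(prefix)) {
        // the per-bucket value shadows the generic option
        bound.set("fs.s3a." + key.substring(prefix.length()), entry.getValue());
      }
    }
    return bound;
  }

  public static void main(String[] args) {
    final Configuration base = new Configuration(false);
    base.set("fs.s3a.endpoint", "s3.amazonaws.com");
    base.set("fs.s3a.bucket.gcs-container.endpoint", "https://storage.googleapis.com");
    // prints the GCS endpoint: the per-bucket key wins for this bucket
    System.out.println(bindToBucket(base, "gcs-container").get("fs.s3a.endpoint"));
  }
}
```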
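The S3ATestUtils hunk restores the negated `!NetworkBinding.isAwsEndpoint(...)` call, so after this revert the guard lets a test proceed only when the configured endpoint is not recognised as AWS-hosted. Below is a rough, self-contained sketch of the skip-not-fail semantics behind such a guard, assuming JUnit 4's `Assume`; `looksAwsHosted` is an invented, much cruder stand-in for `NetworkBinding.isAwsEndpoint()`.

```java
import org.junit.Assume;
import org.junit.Test;

public class AssumeGuardSketch {

  /** Invented heuristic: an unset endpoint (the AWS SDK default) or an
   *  amazonaws host is treated as AWS-hosted. */
  private static boolean looksAwsHosted(String endpoint) {
    return endpoint.isEmpty() || endpoint.contains("amazonaws.");
  }

  @Test
  public void testGuardedOperation() {
    final String endpoint = System.getProperty("fs.s3a.endpoint", "");
    // assumeTrue() reports the test as skipped, not failed, when the
    // condition is false; mirroring the post-revert (negated) form, this
    // test now runs only against endpoints that do NOT look AWS-hosted.
    Assume.assumeTrue("store is not AWS S3", !looksAwsHosted(endpoint));
    // ... test body follows ...
  }
}
```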
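In the ITestBucketTool hunk the remaining test body is an `intercept(...)` call wrapped in `expectErrorCode(...)`; in Hadoop, `intercept` comes from `org.apache.hadoop.test.LambdaTestUtils`. Here is a minimal, invented sketch of the pattern (not the Hadoop signature): invoke a callable, require an exception of the expected type whose text contains a marker, and fail if the call returns normally.

```java
import java.util.concurrent.Callable;

public final class InterceptSketch {

  /** Run the callable; return the expected exception, rethrow unexpected
   *  ones, and fail if nothing is thrown at all. */
  public static <E extends Exception> E intercept(
      Class<E> expected, String contained, Callable<?> call) throws Exception {
    try {
      Object result = call.call();
      // reaching here means nothing was thrown: that is a test failure
      throw new AssertionError("Expected " + expected.getSimpleName()
          + " but the call returned: " + result);
    } catch (Exception e) {
      if (expected.isInstance(e) && e.toString().contains(contained)) {
        return expected.cast(e);
      }
      throw e;
    }
  }
}
```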