This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new d00b3acd5eca HADOOP-18679. Followup: change method name case (#6854)
d00b3acd5eca is described below

commit d00b3acd5ecac9907dae2f09f42a0c2ce4f94d86
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Thu May 30 19:34:30 2024 +0100

    HADOOP-18679. Followup: change method name case (#6854)
    
    
    WrappedIO.bulkDelete_PageSize() => bulkDelete_pageSize()
    
    Makes it consistent with the HADOOP-19131 naming scheme.
    The name needs to be fixed before callers start invoking it through
    reflection: a reflective lookup of the old name would compile cleanly
    but fail to bind at run time.
    
    Contributed by Steve Loughran
---
 .../src/main/java/org/apache/hadoop/io/wrappedio/WrappedIO.java       | 2 +-
 .../org/apache/hadoop/fs/contract/AbstractContractBulkDeleteTest.java | 2 +-
 .../src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java  | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/wrappedio/WrappedIO.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/wrappedio/WrappedIO.java
index 696055895a19..286557c2c378 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/wrappedio/WrappedIO.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/wrappedio/WrappedIO.java
@@ -54,7 +54,7 @@ public final class WrappedIO {
    * @throws IllegalArgumentException path not valid.
    * @throws IOException problems resolving paths
    */
-  public static int bulkDelete_PageSize(FileSystem fs, Path path) throws 
IOException {
+  public static int bulkDelete_pageSize(FileSystem fs, Path path) throws 
IOException {
     try (BulkDelete bulk = fs.createBulkDelete(path)) {
       return bulk.pageSize();
     }
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractBulkDeleteTest.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractBulkDeleteTest.java
index 9ebf9923f39c..1413e74a7e0b 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractBulkDeleteTest.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractBulkDeleteTest.java
@@ -69,7 +69,7 @@ public abstract class AbstractContractBulkDeleteTest extends 
AbstractFSContractT
   public void setUp() throws Exception {
     fs = getFileSystem();
     basePath = path(getClass().getName());
-    pageSize = WrappedIO.bulkDelete_PageSize(getFileSystem(), basePath);
+    pageSize = WrappedIO.bulkDelete_pageSize(getFileSystem(), basePath);
     fs.mkdirs(basePath);
   }
 
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java
index 0676dd5b16ed..5aa72e694906 100644
--- 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java
@@ -735,7 +735,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
 
     bindReadOnlyRolePolicy(assumedRoleConfig, readOnlyDir);
     roleFS = (S3AFileSystem) destDir.getFileSystem(assumedRoleConfig);
-    int bulkDeletePageSize = WrappedIO.bulkDelete_PageSize(roleFS, destDir);
+    int bulkDeletePageSize = WrappedIO.bulkDelete_pageSize(roleFS, destDir);
     int range = bulkDeletePageSize == 1 ? bulkDeletePageSize : 10;
     touchFiles(fs, readOnlyDir, range);
     touchFiles(roleFS, destDir, range);
@@ -769,7 +769,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     bindReadOnlyRolePolicy(assumedRoleConfig, readOnlyDir);
     roleFS = (S3AFileSystem) destDir.getFileSystem(assumedRoleConfig);
     S3AFileSystem fs = getFileSystem();
-    if (WrappedIO.bulkDelete_PageSize(fs, destDir) == 1) {
+    if (WrappedIO.bulkDelete_pageSize(fs, destDir) == 1) {
       String msg = "Skipping as this test requires more than one path to be 
deleted in bulk";
       LOG.debug(msg);
       skip(msg);


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to