(hadoop-thirdparty) branch trunk updated: HADOOP-19163. Use protobuf-java 3.25.3 (#36). Contributed by Bilwa S T
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop-thirdparty.git The following commit(s) were added to refs/heads/trunk by this push: new cb960d0 HADOOP-19163. Use protobuf-java 3.25.3 (#36). Contributed by Bilwa S T cb960d0 is described below commit cb960d0eaac9d39ff7556345de25b57c82fc5a05 Author: BilwaST AuthorDate: Mon May 27 15:44:09 2024 +0530 HADOOP-19163. Use protobuf-java 3.25.3 (#36). Contributed by Bilwa S T --- LICENSE-binary | 2 +- {hadoop-shaded-protobuf_3_23 => hadoop-shaded-protobuf_3_25}/pom.xml | 2 +- pom.xml | 4 ++-- src/site/markdown/index.md.vm| 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 1510d48..95d9b42 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -218,7 +218,7 @@ See licenses-binary/ for text of these licenses. BSD 3-Clause -com.google.protobuf:protobuf-java:3.23.4 +com.google.protobuf:protobuf-java:3.25.3 MIT License diff --git a/hadoop-shaded-protobuf_3_23/pom.xml b/hadoop-shaded-protobuf_3_25/pom.xml similarity index 98% rename from hadoop-shaded-protobuf_3_23/pom.xml rename to hadoop-shaded-protobuf_3_25/pom.xml index 0c1d3c8..3e9a98d 100644 --- a/hadoop-shaded-protobuf_3_23/pom.xml +++ b/hadoop-shaded-protobuf_3_25/pom.xml @@ -27,7 +27,7 @@ ../pom.xml 4.0.0 - hadoop-shaded-protobuf_3_23 + hadoop-shaded-protobuf_3_25 Apache Hadoop shaded Protobuf jar diff --git a/pom.xml b/pom.xml index ae56ab1..e98cf77 100644 --- a/pom.xml +++ b/pom.xml @@ -94,7 +94,7 @@ org.apache.hadoop.thirdparty ${shaded.prefix}.protobuf -3.23.4 +3.25.3 32.0.1-jre 1.11.3 @@ -123,7 +123,7 @@ -hadoop-shaded-protobuf_3_23 +hadoop-shaded-protobuf_3_25 hadoop-shaded-guava hadoop-shaded-avro_1_11 diff --git a/src/site/markdown/index.md.vm b/src/site/markdown/index.md.vm index 11710d2..549975d 100644 --- a/src/site/markdown/index.md.vm +++ b/src/site/markdown/index.md.vm @@ -42,9 +42,9 @@ 
This page provides an overview of the major changes. Protobuf-java - -Google Protobuf's 3.23.4 jar is available as *org.apache.hadoop.thirdparty:hadoop-shaded-protobuf_3_23* artifact. +Google Protobuf's 3.25.3 jar is available as *org.apache.hadoop.thirdparty:hadoop-shaded-protobuf_3_25* artifact. -Following are relocations under *hadoop-shaded-protobuf_3_23* artifact: +Following are relocations under *hadoop-shaded-protobuf_3_25* artifact: |Original package | Shaded package | |---|---| - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-13147. Constructors must not call overrideable methods in PureJavaCrc32C (#6408). Contributed by Sebb.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new f11a8cfa6ee4 HADOOP-13147. Constructors must not call overrideable methods in PureJavaCrc32C (#6408). Contributed by Sebb. f11a8cfa6ee4 is described below commit f11a8cfa6ee4ab733fd5a93f114361973b0e33f7 Author: Sebb AuthorDate: Mon May 20 19:38:08 2024 +0100 HADOOP-13147. Constructors must not call overrideable methods in PureJavaCrc32C (#6408). Contributed by Sebb. --- .../org/apache/hadoop/util/PureJavaCrc32C.java | 8 - .../org/apache/hadoop/util/TestPureJavaCrc32C.java | 37 ++ 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java index bdf929709ab8..11388f0f1cba 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java @@ -39,7 +39,7 @@ public class PureJavaCrc32C implements Checksum { /** Create a new PureJavaCrc32 object. 
 */ public PureJavaCrc32C() { -reset(); +resetFinal(); // safe to call as it cannot be overridden } @Override @@ -50,6 +50,12 @@ public class PureJavaCrc32C implements Checksum { @Override public void reset() { +resetFinal(); + } + + // This must be final as it is called by the ctor + // (can't also be private, as checkstyle then complains) + final void resetFinal() { crc = 0xffffffff; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32C.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32C.java new file mode 100644 index ..b085bbf2ac92 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32C.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.util; + +import java.util.zip.Checksum; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + + +public class TestPureJavaCrc32C { + + @Test + public void testChecksumInit() { +Checksum csum = new PureJavaCrc32C(); +long crc1 = csum.getValue(); +csum.reset(); +long crc2 = csum.getValue(); +assertEquals("reset should give same as initial value", crc1, crc2); + } +} - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.4 updated: MAPREDUCE-7475. Fix non-idempotent unit tests (#6785) (#6837)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.4 by this push: new 82f6d1184c8b MAPREDUCE-7475. Fix non-idempotent unit tests (#6785) (#6837) 82f6d1184c8b is described below commit 82f6d1184c8b7f6d1278e9ecd59cd0ce467254ba Author: Kaiyao Ke <47203510+kaiya...@users.noreply.github.com> AuthorDate: Sun May 19 15:32:54 2024 -0500 MAPREDUCE-7475. Fix non-idempotent unit tests (#6785) (#6837) Contributed by Kaiyao Ke --- .../mapreduce/v2/app/webapp/TestAppController.java | 2 ++ .../java/org/apache/hadoop/mapred/TestMapTask.java | 18 - .../hadoop/mapred/TestTaskProgressReporter.java| 6 ++ .../apache/hadoop/mapred/NotificationTestCase.java | 2 ++ .../hadoop/mapred/TestOldCombinerGrouping.java | 23 ++ .../hadoop/mapreduce/TestNewCombinerGrouping.java | 23 ++ 6 files changed, 53 insertions(+), 21 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java index ba5c43012146..473681c3e424 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java @@ -319,6 +319,8 @@ public class TestAppController { appController.attempts(); assertEquals(AttemptsPage.class, appController.getClazz()); + +appController.getProperty().remove(AMParams.ATTEMPT_STATE); } } diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java index fef179994f09..771a5313ec32 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java @@ -32,6 +32,7 @@ import org.apache.hadoop.mapreduce.TaskCounter; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.util.Progress; import org.junit.After; +import org.junit.Before; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -47,14 +48,21 @@ import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; public class TestMapTask { - private static File TEST_ROOT_DIR = new File( + private static File testRootDir = new File( System.getProperty("test.build.data", System.getProperty("java.io.tmpdir", "/tmp")), TestMapTask.class.getName()); + @Before + public void setup() throws Exception { +if(!testRootDir.exists()) { + testRootDir.mkdirs(); +} + } + @After public void cleanup() throws Exception { -FileUtil.fullyDelete(TEST_ROOT_DIR); +FileUtil.fullyDelete(testRootDir); } @Rule @@ -66,7 +74,7 @@ public class TestMapTask { public void testShufflePermissions() throws Exception { JobConf conf = new JobConf(); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077"); -conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath()); +conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath()); MapOutputFile mof = new MROutputFiles(); mof.setConf(conf); TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1); @@ -98,7 +106,7 @@ public class TestMapTask { public void 
testSpillFilesCountLimitInvalidValue() throws Exception { JobConf conf = new JobConf(); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077"); -conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath()); +conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath()); conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, -2); MapOutputFile mof = new MROutputFiles(); mof.setConf(conf); @@ -124,7 +132,7 @@ public class TestMapTask { public void testSpillFilesCountBreach() throws Exception { JobConf conf = new JobConf(); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077"); -conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath()); +conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath()); conf.
(hadoop) branch trunk updated: HADOOP-19107. Drop support for HBase v1 & upgrade HBase v2 (#6629). Contributed by Ayush Saxena
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new eec9cd299707 HADOOP-19107. Drop support for HBase v1 & upgrade HBase v2 (#6629). Contributed by Ayush Saxena eec9cd299707 is described below commit eec9cd299707d7f34a1bbf340f5f4a6cf479f66a Author: Ayush Saxena AuthorDate: Mon Apr 22 21:55:58 2024 +0530 HADOOP-19107. Drop support for HBase v1 & upgrade HBase v2 (#6629). Contributed by Ayush Saxena --- BUILDING.txt | 9 +- LICENSE-binary | 8 +- .../main/resources/assemblies/hadoop-yarn-dist.xml | 2 +- hadoop-project/pom.xml | 59 +- .../pom.xml| 131 +--- .../reader/DummyTimelineReaderMetrics.java | 39 -- .../TestTimelineReaderWebServicesHBaseStorage.java | 11 - .../storage/TestTimelineReaderHBaseDown.java | 4 + .../pom.xml| 18 - .../pom.xml| 18 - .../pom.xml| 211 -- .../src/assembly/coprocessor.xml | 38 -- .../storage/common/HBaseTimelineServerUtils.java | 222 --- .../storage/common/package-info.java | 28 - .../storage/flow/FlowRunCoprocessor.java | 278 .../timelineservice/storage/flow/FlowScanner.java | 721 - .../storage/flow/FlowScannerOperation.java | 46 -- .../timelineservice/storage/flow/package-info.java | 29 - .../timelineservice/storage/package-info.java | 28 - .../pom.xml| 30 +- .../pom.xml| 62 +- .../metrics/TimelineReaderMetrics.java | 5 +- .../hadoop-yarn/hadoop-yarn-server/pom.xml | 1 + hadoop-yarn-project/pom.xml| 31 +- 24 files changed, 82 insertions(+), 1947 deletions(-) diff --git a/BUILDING.txt b/BUILDING.txt index ceac4213b918..191df097b219 100644 --- a/BUILDING.txt +++ b/BUILDING.txt @@ -163,14 +163,7 @@ Maven build goals: YARN Application Timeline Service V2 build options: YARN Timeline Service v.2 chooses Apache HBase as the primary backing storage. The supported - versions of Apache HBase are 1.7.1 (default) and 2.2.4. 
- - * HBase 1.7.1 is used by default to build Hadoop. The official releases are ready to use if you -plan on running Timeline Service v2 with HBase 1.7.1. - - * Use -Dhbase.profile=2.0 to build Hadoop with HBase 2.2.4. Provide this option if you plan -on running Timeline Service v2 with HBase 2.x. - + version of Apache HBase is 2.5.8. Snappy build options: diff --git a/LICENSE-binary b/LICENSE-binary index c9577c8737a8..ddce4209cc50 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -310,10 +310,10 @@ org.apache.commons:commons-validator:1.6 org.apache.curator:curator-client:5.2.0 org.apache.curator:curator-framework:5.2.0 org.apache.curator:curator-recipes:5.2.0 -org.apache.hbase:hbase-annotations:1.7.1 -org.apache.hbase:hbase-client:1.7.1 -org.apache.hbase:hbase-common:1.7.1 -org.apache.hbase:hbase-protocol:1.7.1 +org.apache.hbase:hbase-annotations:2.5.8 +org.apache.hbase:hbase-client:2.5.8 +org.apache.hbase:hbase-common:2.5.8 +org.apache.hbase:hbase-protocol:2.5.8 org.apache.htrace:htrace-core:3.1.0-incubating org.apache.htrace:htrace-core4:4.1.0-incubating org.apache.httpcomponents:httpclient:4.5.13 diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml index cd86ce4e4176..cb90d59fcd77 100644 --- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml +++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml @@ -245,7 +245,7 @@ -org.apache.hadoop:${hbase-server-artifactid} + org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-server-2 share/hadoop/${hadoop.component}/timelineservice diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 67cd8fe58432..c2b5fbe9d60d 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -203,8 +203,7 @@ 1.5.4 2.0 2.11.0 -1.7.1 -2.2.4 +2.5.8-hadoop3 4.13.2 5.8.2 5.8.2 @@ -501,6 +500,11 @@ ${hadoop.version} test-jar + +org.apache.hadoop + 
hadoop-yarn-server-timelineservice-hbase-server-2 +${hadoop.version} + org.apache.hadoop @@ -1811,6 +1815,10 @@ org.apache.yetus audience-annotations + +org.osgi +org.o
(hadoop) branch trunk updated (87cc2f1a1f92 -> 922c44a339e9)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 87cc2f1a1f92 HDFS-17465. RBF: Use ProportionRouterRpcFairnessPolicyController get 'java.Lang. Error: Maximum permit count exceeded' (#6727) add 922c44a339e9 HADOOP-19130. FTPFileSystem rename with full qualified path broken (#6678). Contributed by shawn No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/fs/ftp/FTPFileSystem.java| 8 ++-- .../apache/hadoop/fs/ftp/TestFTPFileSystem.java| 49 +- 2 files changed, 52 insertions(+), 5 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HDFS-17449. Fix ill-formed decommission host name and port pair triggers IndexOutOfBound error (#6691). Contributed by ConfX
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 73e6931ed05e HDFS-17449. Fix ill-formed decommission host name and port pair triggers IndexOutOfBound error (#6691). Contributed by ConfX 73e6931ed05e is described below commit 73e6931ed05e25a91ee0854fe2dab7dc9a288746 Author: ConfX <114765570+teamco...@users.noreply.github.com> AuthorDate: Sat Apr 6 16:08:09 2024 +0800 HDFS-17449. Fix ill-formed decommission host name and port pair triggers IndexOutOfBound error (#6691). Contributed by ConfX Signed-off-by: Ayush Saxena --- .../src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java| 4 1 file changed, 4 insertions(+) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java index e171e2bbdc6f..9dd788d166a8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java @@ -106,6 +106,10 @@ public class HostsFileWriter { for (String hostNameAndPort : decommissionHostNameAndPorts) { DatanodeAdminProperties dn = new DatanodeAdminProperties(); String[] hostAndPort = hostNameAndPort.split(":"); + if (hostAndPort.length != 2) { +throw new IllegalArgumentException("The decommision host name and port format is " ++ "invalid. The format should be in <host>:<port>, not " + hostNameAndPort); + } dn.setHostName(hostAndPort[0]); dn.setPort(Integer.parseInt(hostAndPort[1])); dn.setAdminState(AdminStates.DECOMMISSIONED); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.4 updated: HADOOP-19123. Update to commons-configuration2 2.10.1 due to CVE (#6661). Contributed by PJ Fanning
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.4 by this push: new ca4599b3d8bb HADOOP-19123. Update to commons-configuration2 2.10.1 due to CVE (#6661). Contributed by PJ Fanning ca4599b3d8bb is described below commit ca4599b3d8bb1370050fc417d71596487563ebbd Author: PJ Fanning AuthorDate: Tue Apr 2 21:50:00 2024 +0200 HADOOP-19123. Update to commons-configuration2 2.10.1 due to CVE (#6661). Contributed by PJ Fanning Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- LICENSE-binary | 2 +- hadoop-project/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 0a3febb6daf2..b2a635a18757 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -300,7 +300,7 @@ net.minidev:accessors-smart:1.2 org.apache.avro:avro:1.9.2 org.apache.commons:commons-collections4:4.2 org.apache.commons:commons-compress:1.24.0 -org.apache.commons:commons-configuration2:2.8.0 +org.apache.commons:commons-configuration2:2.10.1 org.apache.commons:commons-csv:1.9.0 org.apache.commons:commons-digester:1.8.1 org.apache.commons:commons-lang3:3.12.0 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 5ed06e4ed241..aefeaa91baf1 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -1244,7 +1244,7 @@ org.apache.commons commons-configuration2 -2.8.0 +2.10.1 org.apache.commons - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-19123. Update to commons-configuration2 2.10.1 due to CVE (#6661). Contributed by PJ Fanning
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 1357bb162dfe HADOOP-19123. Update to commons-configuration2 2.10.1 due to CVE (#6661). Contributed by PJ Fanning 1357bb162dfe is described below commit 1357bb162dfe741b97bde29222d1e3c3f7425f20 Author: PJ Fanning AuthorDate: Tue Apr 2 21:50:00 2024 +0200 HADOOP-19123. Update to commons-configuration2 2.10.1 due to CVE (#6661). Contributed by PJ Fanning Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- LICENSE-binary | 2 +- hadoop-project/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 9fd8608df626..084907d1dd97 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -300,7 +300,7 @@ net.minidev:accessors-smart:1.2 org.apache.avro:avro:1.9.2 org.apache.commons:commons-collections4:4.2 org.apache.commons:commons-compress:1.24.0 -org.apache.commons:commons-configuration2:2.8.0 +org.apache.commons:commons-configuration2:2.10.1 org.apache.commons:commons-csv:1.9.0 org.apache.commons:commons-digester:1.8.1 org.apache.commons:commons-lang3:3.12.0 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 40fb065f8a7c..cb3018811cd1 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -1244,7 +1244,7 @@ org.apache.commons commons-configuration2 -2.8.0 +2.10.1 org.apache.commons - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-19077. Remove use of javax.ws.rs.core.HttpHeaders (#6554). Contributed by PJ Fanning
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new f7d1ec2d9e43 HADOOP-19077. Remove use of javax.ws.rs.core.HttpHeaders (#6554). Contributed by PJ Fanning f7d1ec2d9e43 is described below commit f7d1ec2d9e433b4073deac5d15a8234f0c6dd5e9 Author: PJ Fanning AuthorDate: Mon Apr 1 09:13:39 2024 +0200 HADOOP-19077. Remove use of javax.ws.rs.core.HttpHeaders (#6554). Contributed by PJ Fanning Signed-off-by: Ayush Saxena --- .../src/main/java/org/apache/hadoop/conf/ConfServlet.java | 2 +- .../src/test/java/org/apache/hadoop/conf/TestConfServlet.java | 2 +- .../src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java | 2 +- .../hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java | 2 +- .../hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java| 2 +- .../apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java| 2 +- .../java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java | 2 +- .../java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java | 4 +--- .../src/main/java/org/apache/hadoop/yarn/service/utils/HttpUtil.java | 2 +- .../hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java | 2 +- .../hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java | 2 +- .../resourcemanager/webapp/TestRMWebServicesAppsModification.java | 2 +- .../hadoop/yarn/server/router/webapp/FederationInterceptorREST.java | 2 +- .../apache/hadoop/yarn/server/router/webapp/RouterWebServiceUtil.java | 2 +- .../yarn/server/router/webapp/MockDefaultRequestInterceptorREST.java | 2 +- 15 files changed, 15 insertions(+), 17 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java index 
b427038fdddc..feaf5fdfefce 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java @@ -24,13 +24,13 @@ import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.core.HttpHeaders; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.classification.VisibleForTesting; +import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders; /** * A servlet to print out the running configuration data. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java index 6db47d6d22fd..dfb1f5567c6f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java @@ -27,10 +27,10 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; -import javax.ws.rs.core.HttpHeaders; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; +import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders; import org.eclipse.jetty.util.ajax.JSON; import org.w3c.dom.Document; import org.w3c.dom.Element; diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index a8bd95b32a25..d0607e96dc42 100644 
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -52,7 +52,6 @@ import java.util.Set; import java.util.StringTokenizer; import java.util.concurrent.TimeUnit; -import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import org.apache.commons.io.IOUtils; @@ -128,6 +127,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenSelector; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector; import org.apache.hadoop.security.token.DelegationTokenIssuer; +import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders; import org.apache.hadoop.util.JsonSerialization; import
(hadoop) branch branch-3.4 updated: HDFS-17450. Add explicit dependency on httpclient jar (#6130). Contributed by PJ Fanning
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.4 by this push: new 19b3d6848f91 HDFS-17450. Add explicit dependency on httpclient jar (#6130). Contributed by PJ Fanning 19b3d6848f91 is described below commit 19b3d6848f91ba3f49e437d6b7fff2ae61c4e795 Author: PJ Fanning AuthorDate: Sat Mar 30 18:54:15 2024 +0100 HDFS-17450. Add explicit dependency on httpclient jar (#6130). Contributed by PJ Fanning Signed-off-by: Ayush Saxena --- hadoop-hdfs-project/hadoop-hdfs-client/pom.xml | 10 ++ 1 file changed, 10 insertions(+) diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml index f06756a116d8..47b307bc5507 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml @@ -49,6 +49,16 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd;> + + org.apache.httpcomponents + httpclient + ${httpclient.version} + + + org.apache.httpcomponents + httpcore + ${httpcore.version} + junit junit - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (4807815e1cb4 -> 59976f1be277)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 4807815e1cb4 HDFS-17448. Enhance the stability of the unit test TestDiskBalancerCommand (#6690). Contributed by Haiyang Hu add 59976f1be277 HDFS-17450. Add explicit dependency on httpclient jar (#6130). Contributed by PJ Fanning No new revisions were added by this update. Summary of changes: hadoop-hdfs-project/hadoop-hdfs-client/pom.xml | 10 ++ 1 file changed, 10 insertions(+) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (06db6289cb40 -> 4807815e1cb4)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 06db6289cb40 HADOOP-19024. Use bouncycastle jdk18 1.77 (#6410). Contributed add 4807815e1cb4 HDFS-17448. Enhance the stability of the unit test TestDiskBalancerCommand (#6690). Contributed by Haiyang Hu No new revisions were added by this update. Summary of changes: .../server/diskbalancer/command/TestDiskBalancerCommand.java | 9 +++-- 1 file changed, 7 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.4 updated: HADOOP-19024. Use bouncycastle jdk18 1.77 (#6410). Contributed by PJ Fanning
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.4 by this push: new 30a303b13e04 HADOOP-19024. Use bouncycastle jdk18 1.77 (#6410). Contributed by PJ Fanning 30a303b13e04 is described below commit 30a303b13e04e62cfebcb4e67b8df341ba3ecd99 Author: PJ Fanning AuthorDate: Sat Mar 30 15:28:12 2024 +0100 HADOOP-19024. Use bouncycastle jdk18 1.77 (#6410). Contributed by PJ Fanning --- LICENSE-binary| 6 +++--- hadoop-client-modules/hadoop-client-integration-tests/pom.xml | 4 ++-- .../hadoop-cos/src/site/markdown/cloud-storage/index.md | 2 +- hadoop-common-project/hadoop-auth/pom.xml | 2 +- hadoop-common-project/hadoop-common/pom.xml | 2 +- hadoop-common-project/hadoop-kms/pom.xml | 2 +- hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml| 2 +- hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml | 2 +- hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml | 2 +- hadoop-hdfs-project/hadoop-hdfs/pom.xml | 2 +- .../hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml | 8 .../hadoop-mapreduce-client-jobclient/pom.xml | 4 ++-- hadoop-project/pom.xml| 8 hadoop-tools/hadoop-aws/pom.xml | 4 ++-- hadoop-tools/hadoop-azure/pom.xml | 4 ++-- hadoop-tools/hadoop-extras/pom.xml| 4 ++-- hadoop-tools/hadoop-gridmix/pom.xml | 4 ++-- hadoop-tools/hadoop-streaming/pom.xml | 4 ++-- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml| 2 +- .../hadoop-yarn-server-applicationhistoryservice/pom.xml | 2 +- .../hadoop-yarn-server/hadoop-yarn-server-common/pom.xml | 2 +- .../hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml | 2 +- .../hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml | 4 ++-- 23 files changed, 39 insertions(+), 39 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 3c02de55effc..0a3febb6daf2 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -483,9 +483,9 @@ 
com.microsoft.azure:azure-cosmosdb-gateway:2.4.5 com.microsoft.azure:azure-data-lake-store-sdk:2.3.3 com.microsoft.azure:azure-keyvault-core:1.0.0 com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre7 -org.bouncycastle:bcpkix-jdk15on:1.70 -org.bouncycastle:bcprov-jdk15on:1.70 -org.bouncycastle:bcutil-jdk15on:1.70 +org.bouncycastle:bcpkix-jdk18on:1.77 +org.bouncycastle:bcprov-jdk18on:1.77 +org.bouncycastle:bcutil-jdk18on:1.77 org.checkerframework:checker-qual:2.5.2 org.codehaus.mojo:animal-sniffer-annotations:1.21 org.jruby.jcodings:jcodings:1.0.13 diff --git a/hadoop-client-modules/hadoop-client-integration-tests/pom.xml b/hadoop-client-modules/hadoop-client-integration-tests/pom.xml index eb50d7d36d70..6b3e9760d028 100644 --- a/hadoop-client-modules/hadoop-client-integration-tests/pom.xml +++ b/hadoop-client-modules/hadoop-client-integration-tests/pom.xml @@ -77,12 +77,12 @@ org.bouncycastle - bcprov-jdk15on + bcprov-jdk18on test org.bouncycastle - bcpkix-jdk15on + bcpkix-jdk18on test diff --git a/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md b/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md index 9c96ac365981..64647b03e9ba 100644 --- a/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md +++ b/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md @@ -86,7 +86,7 @@ Linux kernel 2.6+ - joda-time (version 2.9.9 recommended) - httpClient (version 4.5.1 or later recommended) - Jackson: jackson-core, jackson-databind, jackson-annotations (version 2.9.8 or later) -- bcprov-jdk15on (version 1.59 recommended) +- bcprov-jdk18on (version 1.77 recommended) Configure Properties diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml index 54330238bf21..014c7daf69e6 100644 --- a/hadoop-common-project/hadoop-auth/pom.xml +++ b/hadoop-common-project/hadoop-auth/pom.xml @@ -108,7 +108,7 @@ org.bouncycastle - bcprov-jdk15on + 
bcprov-jdk18on diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index ebefed708c71..3e201295cf64 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project
(hadoop) branch trunk updated: HADOOP-19024. Use bouncycastle jdk18 1.77 (#6410). Contributed by PJ Fanning
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 06db6289cb40 HADOOP-19024. Use bouncycastle jdk18 1.77 (#6410). Contributed 06db6289cb40 is described below commit 06db6289cb40d34cd382080dedc86ff8ae437f9c Author: PJ Fanning AuthorDate: Sat Mar 30 15:28:12 2024 +0100 HADOOP-19024. Use bouncycastle jdk18 1.77 (#6410). Contributed --- LICENSE-binary| 6 +++--- hadoop-client-modules/hadoop-client-integration-tests/pom.xml | 4 ++-- .../hadoop-cos/src/site/markdown/cloud-storage/index.md | 2 +- hadoop-common-project/hadoop-auth/pom.xml | 2 +- hadoop-common-project/hadoop-common/pom.xml | 2 +- hadoop-common-project/hadoop-kms/pom.xml | 2 +- hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml| 2 +- hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml | 2 +- hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml | 2 +- hadoop-hdfs-project/hadoop-hdfs/pom.xml | 2 +- .../hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml | 8 .../hadoop-mapreduce-client-jobclient/pom.xml | 4 ++-- hadoop-project/pom.xml| 8 hadoop-tools/hadoop-aws/pom.xml | 4 ++-- hadoop-tools/hadoop-azure/pom.xml | 4 ++-- hadoop-tools/hadoop-extras/pom.xml| 4 ++-- hadoop-tools/hadoop-gridmix/pom.xml | 4 ++-- hadoop-tools/hadoop-streaming/pom.xml | 4 ++-- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml| 2 +- .../hadoop-yarn-server-applicationhistoryservice/pom.xml | 2 +- .../hadoop-yarn-server/hadoop-yarn-server-common/pom.xml | 2 +- .../hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml | 2 +- .../hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml | 4 ++-- 23 files changed, 39 insertions(+), 39 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 0670df3c2aec..9fd8608df626 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -483,9 +483,9 @@ com.microsoft.azure:azure-cosmosdb-gateway:2.4.5 
com.microsoft.azure:azure-data-lake-store-sdk:2.3.3 com.microsoft.azure:azure-keyvault-core:1.0.0 com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre7 -org.bouncycastle:bcpkix-jdk15on:1.70 -org.bouncycastle:bcprov-jdk15on:1.70 -org.bouncycastle:bcutil-jdk15on:1.70 +org.bouncycastle:bcpkix-jdk18on:1.77 +org.bouncycastle:bcprov-jdk18on:1.77 +org.bouncycastle:bcutil-jdk18on:1.77 org.checkerframework:checker-qual:2.5.2 org.codehaus.mojo:animal-sniffer-annotations:1.21 org.jruby.jcodings:jcodings:1.0.13 diff --git a/hadoop-client-modules/hadoop-client-integration-tests/pom.xml b/hadoop-client-modules/hadoop-client-integration-tests/pom.xml index 4146acf89dcd..34be6956d7c6 100644 --- a/hadoop-client-modules/hadoop-client-integration-tests/pom.xml +++ b/hadoop-client-modules/hadoop-client-integration-tests/pom.xml @@ -77,12 +77,12 @@ org.bouncycastle - bcprov-jdk15on + bcprov-jdk18on test org.bouncycastle - bcpkix-jdk15on + bcpkix-jdk18on test diff --git a/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md b/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md index 9c96ac365981..64647b03e9ba 100644 --- a/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md +++ b/hadoop-cloud-storage-project/hadoop-cos/src/site/markdown/cloud-storage/index.md @@ -86,7 +86,7 @@ Linux kernel 2.6+ - joda-time (version 2.9.9 recommended) - httpClient (version 4.5.1 or later recommended) - Jackson: jackson-core, jackson-databind, jackson-annotations (version 2.9.8 or later) -- bcprov-jdk15on (version 1.59 recommended) +- bcprov-jdk18on (version 1.77 recommended) Configure Properties diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml index ed10bae74e1c..34827579ccfa 100644 --- a/hadoop-common-project/hadoop-auth/pom.xml +++ b/hadoop-common-project/hadoop-auth/pom.xml @@ -108,7 +108,7 @@ org.bouncycastle - bcprov-jdk15on + bcprov-jdk18on diff --git 
a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 8a672f2c03b1..9f6c91a36af9 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -374,7 +374,7
(hadoop) branch trunk updated: HDFS-17103. Fix file system cleanup in TestNameEditsConfigs (#6071). Contributed by ConfX.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new aabf31f6fa8c HDFS-17103. Fix file system cleanup in TestNameEditsConfigs (#6071). Contributed by ConfX. aabf31f6fa8c is described below commit aabf31f6fa8c18a878846512820ab606848829b0 Author: ConfX <114765570+teamco...@users.noreply.github.com> AuthorDate: Sat Mar 30 17:34:42 2024 +0800 HDFS-17103. Fix file system cleanup in TestNameEditsConfigs (#6071). Contributed by ConfX. Signed-off-by: Ayush Saxena - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-19090. Use protobuf-java 3.23.4. (#6593). Contributed by PJ Fanning.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new fc166d3aec7c HADOOP-19090. Use protobuf-java 3.23.4. (#6593). Contributed by PJ Fanning. fc166d3aec7c is described below commit fc166d3aec7c95110a8cd4ef6ce1fbf4955107e5 Author: PJ Fanning AuthorDate: Thu Mar 7 10:39:01 2024 +0100 HADOOP-19090. Use protobuf-java 3.23.4. (#6593). Contributed by PJ Fanning. --- hadoop-common-project/hadoop-common/pom.xml | 2 +- hadoop-project/pom.xml | 6 +++--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 392a26c2ff9a..3b03fc6aadb7 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -40,7 +40,7 @@ org.apache.hadoop.thirdparty - hadoop-shaded-protobuf_3_21 + hadoop-shaded-protobuf_3_23 org.apache.hadoop diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 043eb24bd405..1cf9401d707c 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -93,11 +93,11 @@ ${common.protobuf2.scope} -3.21.12 +3.23.4 ${env.HADOOP_PROTOC_PATH} 1.2.0 - ${hadoop-thirdparty.version} + 1.3.0-SNAPSHOT ${hadoop-thirdparty.version} org.apache.hadoop.thirdparty ${hadoop-thirdparty-shaded-prefix}.protobuf @@ -250,7 +250,7 @@ org.apache.hadoop.thirdparty -hadoop-shaded-protobuf_3_21 +hadoop-shaded-protobuf_3_23 ${hadoop-thirdparty-protobuf.version} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml index 7d16995fe5e3..217335323a79 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml @@ -51,7 +51,7 @@ 
org.apache.hadoop.thirdparty - hadoop-shaded-protobuf_3_21 + hadoop-shaded-protobuf_3_23 @@ -64,7 +64,7 @@ org.apache.hadoop.thirdparty - hadoop-shaded-protobuf_3_21 + hadoop-shaded-protobuf_3_23 @@ -75,7 +75,7 @@ org.apache.hadoop.thirdparty - hadoop-shaded-protobuf_3_21 + hadoop-shaded-protobuf_3_23 - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.4 updated: Revert "HADOOP-18823. Add Labeler Github Action. (#5874). Contributed by Ayush Saxena."
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.4 by this push: new a013f06fe5c7 Revert "HADOOP-18823. Add Labeler Github Action. (#5874). Contributed by Ayush Saxena." a013f06fe5c7 is described below commit a013f06fe5c7e9b024a4e04a42699cb2f205f210 Author: Ayush Saxena AuthorDate: Mon Jan 22 12:20:27 2024 +0530 Revert "HADOOP-18823. Add Labeler Github Action. (#5874). Contributed by Ayush Saxena." This reverts commit c04a17f1160e3dedcdf294d09f878136af75172a. Reverted from Branch-3.4, since this commit is relevant only for trunk. --- .github/labeler.yml | 57 --- .github/workflows/labeler.yml | 40 -- 2 files changed, 97 deletions(-) diff --git a/.github/labeler.yml b/.github/labeler.yml deleted file mode 100755 index a3fa437e0de2.. --- a/.github/labeler.yml +++ /dev/null @@ -1,57 +0,0 @@ -# -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# -# Pull Request Labeler Github Action Configuration: https://github.com/marketplace/actions/labeler - -trunk: - - '**' -INFRA: - - .asf.yaml - - .gitattributes - - .gitignore - - .github/** - - dev-support/** - - start-build-env.sh -BUILD: - - '**/pom.xml' -COMMON: - - hadoop-common-project/** -HDFS: - - hadoop-hdfs-project/** -RBF: - - hadoop-hdfs-project/hadoop-hdfs-rbf/** -NATIVE: - - hadoop-hdfs-project/hadoop-hdfs-native-client/** - - hadoop-common-project/hadoop-common/src/main/native/** -YARN: - - hadoop-yarn-project/** -MAPREDUCE: - - hadoop-mapreduce-project/** -DISTCP: - - hadoop-tools/hadoop-distcp/** -TOOLS: - - hadoop-tools/** -AWS: - - hadoop-tools/hadoop-aws/** -ABFS: - - hadoop-tools/hadoop-azure/** -DYNAMOMETER: - - hadoop-tools/hadoop-dynamometer/** -MAVEN-PLUGINS: - - hadoop-maven-plugins/** diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml deleted file mode 100644 index f85aff05dda6.. --- a/.github/workflows/labeler.yml +++ /dev/null @@ -1,40 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -name: "Pull Request Labeler" -on: pull_request_target - -permissions: - contents: read - pull-requests: write - -jobs: - triage: -runs-on: ubuntu-latest -steps: - - uses: actions/checkout@v3 -with: - sparse-checkout: | -.github - - uses: actions/labeler@v4.3.0 -with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - sync-labels: true - configuration-path: .github/labeler.yml - dot: true \ No newline at end of file - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (64beecb7cb51 -> 99a59ae9e61e)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 64beecb7cb51 YARN-11631. [GPG] Add GPGWebServices. (#6354) Contributed by Shilun Fan. add 99a59ae9e61e HDFS-17317. Improve the resource release for metaOut in DebugAdmin (#6402). Contributed by xy. No new revisions were added by this update. Summary of changes: .../src/main/java/org/apache/hadoop/hdfs/tools/DebugAdmin.java | 1 + 1 file changed, 1 insertion(+) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-2.10 updated: HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-2.10 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-2.10 by this push: new 3b4fe79e4a74 HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena. 3b4fe79e4a74 is described below commit 3b4fe79e4a74f4b9eb732a9c25ca7302d5a516f3 Author: Ayush Saxena AuthorDate: Mon Jan 1 12:51:54 2024 +0530 HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena. Reviewed-by: Ashutosh Gupta Reviewed-by: Shilun Fan --- hadoop-project/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index f7c54b9dd576..2a0d0c80eb33 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -31,7 +31,7 @@ -2023 +2024 false - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.3 updated: HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new a4f4abf45298 HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena. a4f4abf45298 is described below commit a4f4abf45298dbb90dd933f354073fb262a8093b Author: Ayush Saxena AuthorDate: Mon Jan 1 12:51:54 2024 +0530 HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena. Reviewed-by: Ashutosh Gupta Reviewed-by: Shilun Fan --- hadoop-project/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 9ca9ca29f3c9..225a566bf5b4 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -31,7 +31,7 @@ -2023 +2024 false - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 9a4d10763cb2 HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena. 9a4d10763cb2 is described below commit 9a4d10763cb2488db90242fa1fe8b5b5d24b5e8d Author: Ayush Saxena AuthorDate: Mon Jan 1 12:51:54 2024 +0530 HADOOP-19020. Update the year to 2024. (#6397). Contributed by Ayush Saxena. Reviewed-by: Ashutosh Gupta Reviewed-by: Shilun Fan --- hadoop-project/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index fbc42f4ef107..81c7205b619b 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -31,7 +31,7 @@ -2023 +2024 false - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HDFS-17215. RBF: Fix some method annotations about @throws . (#6136). Contributed by xiaojunxiang.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new e07e445326ac HDFS-17215. RBF: Fix some method annotations about @throws . (#6136). Contributed by xiaojunxiang. e07e445326ac is described below commit e07e445326ace76a0237692a8b28fdc481e3def7 Author: xiaojunxiang AuthorDate: Tue Dec 26 03:07:58 2023 +0800 HDFS-17215. RBF: Fix some method annotations about @throws . (#6136). Contributed by xiaojunxiang. --- .../java/org/apache/hadoop/hdfs/server/federation/router/Quota.java | 2 +- .../apache/hadoop/hdfs/server/federation/store/StateStoreService.java | 4 +--- .../hadoop/hdfs/server/federation/store/impl/MountTableStoreImpl.java | 4 ++-- .../store/protocol/impl/pb/FederationProtocolPBTranslator.java| 2 +- 4 files changed, 5 insertions(+), 7 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/Quota.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/Quota.java index 8e984d65c33e..e19e51b5733a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/Quota.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/Quota.java @@ -75,7 +75,7 @@ public class Quota { * @param storagespaceQuota Storage space quota. * @param type StorageType that the space quota is intended to be set on. * @param checkMountEntry whether to check the path is a mount entry. - * @throws AccessControlException If the quota system is disabled or if + * @throws IOException If the quota system is disabled or if * checkMountEntry is true and the path is a mount entry. 
*/ public void setQuota(String path, long namespaceQuota, long storagespaceQuota, diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/StateStoreService.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/StateStoreService.java index ff0ea486e216..74997e86be3d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/StateStoreService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/StateStoreService.java @@ -138,7 +138,7 @@ public class StateStoreService extends CompositeService { * Initialize the State Store and the connection to the back-end. * * @param config Configuration for the State Store. - * @throws IOException Cannot create driver for the State Store. + * @throws Exception Cannot create driver for the State Store. */ @Override protected void serviceInit(Configuration config) throws Exception { @@ -239,7 +239,6 @@ public class StateStoreService extends CompositeService { * * @param Type of the records stored. * @param clazz Class of the record store to track. - * @return New record store. 
* @throws ReflectiveOperationException */ private > void addRecordStore( @@ -428,7 +427,6 @@ public class StateStoreService extends CompositeService { result = cachedStore.loadCache(force); } catch (IOException e) { LOG.error("Error updating cache for {}", cacheName, e); - result = false; } if (!result) { success = false; diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/impl/MountTableStoreImpl.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/impl/MountTableStoreImpl.java index a46410a27416..3ace406e49a3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/impl/MountTableStoreImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/impl/MountTableStoreImpl.java @@ -70,7 +70,7 @@ public class MountTableStoreImpl extends MountTableStore { * * @param src mount entry being accessed * @param action type of action being performed on the mount entry - * @throws AccessControlException if mount table cannot be accessed + * @throws IOException if mount table cannot be accessed */ private void checkMountTableEntryPermission(String src, FsAction action) throws IOException { @@ -90,7 +90,7 @@ public class MountTableStoreImpl extends MountTableStore { * Check parent path permission recursively. It needs WRITE permission * of the nearest parent entry and other EXECUTE permission. * @param src mount entry being checked - * @throws AccessControlException if mount table cannot be accessed
(hadoop) branch trunk updated (415e9bdfbdee -> 630ffb280d0b)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 415e9bdfbdee HDFS-17273. Change the way of computing some local variables duration for better debugging (#6321). Contributed by farmmamba. add 630ffb280d0b HDFS-17056. EC: Fix verifyClusterSetup output in case of an invalid param. (#6379). Contributed by huangzhaobo99. No new revisions were added by this update. Summary of changes: .../java/org/apache/hadoop/hdfs/tools/ECAdmin.java | 4 +++ .../org/apache/hadoop/hdfs/tools/TestECAdmin.java | 31 ++ 2 files changed, 35 insertions(+) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HDFS-17240. Fix a typo in DataStorage.java (#6226). Contributed by Yu Wang.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 77edca8f0a97 HDFS-17240. Fix a typo in DataStorage.java (#6226). Contributed by Yu Wang. 77edca8f0a97 is described below commit 77edca8f0a97668722a6d602aa4d08d1fff06172 Author: Wang Yu AuthorDate: Fri Dec 22 04:22:12 2023 +0800 HDFS-17240. Fix a typo in DataStorage.java (#6226). Contributed by Yu Wang. Reviewed-by: Shilun Fan Reviewed-by: Tao Li --- .../main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java index 5cf13f698e9b..590cd9f579b5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java @@ -704,7 +704,7 @@ public class DataStorage extends Storage { } /** - * Analize which and whether a transition of the fs state is required + * Analyze which and whether a transition of the fs state is required * and perform it if necessary. * * Rollback if the rollback startup option was specified. - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (de9fde8224fc -> f52c7d3e9a8c)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from de9fde8224fc YARN-11563. Fix typo in AbstractContainerAllocator from CSAssignemnt to CSAssignment (#6026). Contributed by wangzhongwei. add f52c7d3e9a8c HADOOP-18613. Upgrade ZooKeeper to version 3.8.3 (#6296). Contributed by Bilwa S T. No new revisions were added by this update. Summary of changes: LICENSE-binary | 2 +- hadoop-project/pom.xml | 18 +- 2 files changed, 18 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: YARN-11563. Fix typo in AbstractContainerAllocator from CSAssignemnt to CSAssignment (#6026). Contributed by wangzhongwei.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new de9fde8224fc YARN-11563. Fix typo in AbstractContainerAllocator from CSAssignemnt to CSAssignment (#6026). Contributed by wangzhongwei. de9fde8224fc is described below commit de9fde8224fc78e29bf2ddc9b102607c8c661635 Author: gavin.wang AuthorDate: Wed Dec 20 01:13:56 2023 +0800 YARN-11563. Fix typo in AbstractContainerAllocator from CSAssignemnt to CSAssignment (#6026). Contributed by wangzhongwei. Signed-off-by: Ayush Saxena --- .../scheduler/capacity/allocator/AbstractContainerAllocator.java| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/allocator/AbstractContainerAllocator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/allocator/AbstractContainerAllocator.java index 90b088efdfd4..14b252490d63 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/allocator/AbstractContainerAllocator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/allocator/AbstractContainerAllocator.java @@ -181,7 +181,7 @@ public abstract class AbstractContainerAllocator { * @param schedulingMode scheduling mode (exclusive or nonexclusive) * @param resourceLimits resourceLimits * @param reservedContainer reservedContainer - * @return CSAssignemnt proposal + * @return CSAssignment proposal */ public 
abstract CSAssignment assignContainers(Resource clusterResource, CandidateNodeSet candidates, - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: MAPREDUCE-7463. Fix missing comma in HistoryServerRest.html response body (#6342). Contributed by wangzhongwei.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new b5f68b622530 MAPREDUCE-7463. Fix missing comma in HistoryServerRest.html response body (#6342). Contributed by wangzhongwei. b5f68b622530 is described below commit b5f68b6225306bf821d1eec022598cf711023b7e Author: gavin.wang AuthorDate: Wed Dec 20 01:10:55 2023 +0800 MAPREDUCE-7463. Fix missing comma in HistoryServerRest.html response body (#6342). Contributed by wangzhongwei. Signed-off-by: Ayush Saxena --- .../hadoop-mapreduce-client-hs/src/site/markdown/HistoryServerRest.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/site/markdown/HistoryServerRest.md b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/site/markdown/HistoryServerRest.md index 89cf3f30902e..bb513f39a623 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/site/markdown/HistoryServerRest.md +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/site/markdown/HistoryServerRest.md @@ -176,7 +176,7 @@ Response Body: "finishTime" : 1326381356010 }, { -"submitTime" : 1326381446500 +"submitTime" : 1326381446500, "state" : "SUCCEEDED", "user" : "user1", "reducesTotal" : 1, - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HDFS-17282. Reconfig 'SlowIoWarningThreshold' parameters for datanode. (#6338). Contributed by huangzhaobo99
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 1498a8685d50 HDFS-17282. Reconfig 'SlowIoWarningThreshold' parameters for datanode. (#6338). Contributed by huangzhaobo99 1498a8685d50 is described below commit 1498a8685d504bab97bf4a7c71daffba375e Author: huangzhaobo AuthorDate: Thu Dec 14 14:17:53 2023 +0800 HDFS-17282. Reconfig 'SlowIoWarningThreshold' parameters for datanode. (#6338). Contributed by huangzhaobo99 Reviewed-by: Haiyang Hu Reviewed-by: Tao Li Signed-off-by: Ayush Saxena --- .../apache/hadoop/hdfs/server/datanode/DNConf.java | 9 +- .../hadoop/hdfs/server/datanode/DataNode.java | 25 - .../datanode/TestDataNodeReconfiguration.java | 32 ++ .../org/apache/hadoop/hdfs/tools/TestDFSAdmin.java | 2 +- 4 files changed, 65 insertions(+), 3 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java index be36ca70fe44..21b92db3073a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java @@ -37,6 +37,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_PMEM_CACHE_RECOV import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_PMEM_CACHE_RECOVERY_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_PROCESS_COMMANDS_THRESHOLD_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_PROCESS_COMMANDS_THRESHOLD_KEY; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_KEY; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_OVERWRITE_DOWNSTREAM_DERIVED_QOP_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_OVERWRITE_DOWNSTREAM_DERIVED_QOP_KEY; import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY; @@ -114,7 +115,7 @@ public class DNConf { final long ibrInterval; volatile long initialBlockReportDelayMs; volatile long cacheReportInterval; - final long datanodeSlowIoWarningThresholdMs; + private volatile long datanodeSlowIoWarningThresholdMs; final String minimumNameNodeVersion; final String encryptionAlgorithm; @@ -522,4 +523,10 @@ public class DNConf { DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY, DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_DEFAULT, TimeUnit.MILLISECONDS); } + + public void setDatanodeSlowIoWarningThresholdMs(long threshold) { +Preconditions.checkArgument(threshold > 0, +DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_KEY + " should be greater than 0"); +datanodeSlowIoWarningThresholdMs = threshold; + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java index 8fb009dab850..9f73b1cd3c35 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java @@ -80,6 +80,8 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SLOWDISK_LOW_THR import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_KEY; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_DEFAULT; +import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_STARTUP_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_DEFAULT; @@ -371,7 +373,8 @@ public class DataNode extends ReconfigurableBase DFS_DISK_BALANCER_PLAN_VALID_INTERVAL, DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY, DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY, - DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY)); + DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY, + DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_KEY)); public static final String METRICS_LOG_NAME = "DataNodeMetricsLog"; @@ -735,6
(hadoop) branch trunk updated: HDFS-17278. Fix order dependent flakiness in TestViewfsWithNfs3.java under hadoop-hdfs-nfs module (#6329). Contributed by Ruby.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 81de229cf6d8 HDFS-17278. Fix order dependent flakiness in TestViewfsWithNfs3.java under hadoop-hdfs-nfs module (#6329). Contributed by Ruby. 81de229cf6d8 is described below commit 81de229cf6d8c40f7417488a534af3f1dc71ebdc Author: yijut2 <52221089+yij...@users.noreply.github.com> AuthorDate: Tue Dec 12 01:12:26 2023 -0600 HDFS-17278. Fix order dependent flakiness in TestViewfsWithNfs3.java under hadoop-hdfs-nfs module (#6329). Contributed by Ruby. Reviewed-by: Xing Lin Signed-off-by: Ayush Saxena --- .../java/org/apache/hadoop/hdfs/nfs/nfs3/TestDFSClientCache.java| 6 ++ 1 file changed, 6 insertions(+) diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestDFSClientCache.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestDFSClientCache.java index ba9d46e07d1b..d093f51b1ba0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestDFSClientCache.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestDFSClientCache.java @@ -30,9 +30,15 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.nfs.conf.NfsConfiguration; import org.apache.hadoop.security.UserGroupInformation; +import org.junit.After; import org.junit.Test; public class TestDFSClientCache { + @After + public void cleanup() { +UserGroupInformation.reset(); + } + @Test public void testEviction() throws IOException { NfsConfiguration conf = new NfsConfiguration(); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HDFS-17272. NNThroughputBenchmark should support specifying the base directory for multi-client test (#6319). Contributed by caozhiqiang.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 37d6cada14e3 HDFS-17272. NNThroughputBenchmark should support specifying the base directory for multi-client test (#6319). Contributed by caozhiqiang. 37d6cada14e3 is described below commit 37d6cada14e3bc087399527e5fac7d0f488c8ac9 Author: caozhiqiang AuthorDate: Sun Dec 10 16:13:04 2023 +0800 HDFS-17272. NNThroughputBenchmark should support specifying the base directory for multi-client test (#6319). Contributed by caozhiqiang. Reviewed-by: Tao Li Signed-off-by: Ayush Saxena --- .../src/site/markdown/Benchmarking.md | 19 ++--- .../server/namenode/NNThroughputBenchmark.java | 85 -- .../server/namenode/TestNNThroughputBenchmark.java | 36 + 3 files changed, 107 insertions(+), 33 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Benchmarking.md b/hadoop-common-project/hadoop-common/src/site/markdown/Benchmarking.md index 26d5db37d685..2449ab5cdeda 100644 --- a/hadoop-common-project/hadoop-common/src/site/markdown/Benchmarking.md +++ b/hadoop-common-project/hadoop-common/src/site/markdown/Benchmarking.md @@ -54,15 +54,15 @@ Following are all the operations supported along with their respective operation | OPERATION\_OPTION| Operation-specific parameters | |: |: | |`all` | _options for other operations_ | -|`create` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-close`] | -|`mkdirs` | [`-threads 3`] [`-dirs 10`] [`-dirsPerDir 2`] | -|`open` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] | -|`delete` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] | -|`append` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] [`-appendNewBlk`] | -|`fileStatus` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] | -|`rename` | [`-threads 3`] 
[`-files 10`] [`-filesPerDir 4`] [`-useExisting`] | -|`blockReport` | [`-datanodes 10`] [`-reports 30`] [`-blocksPerReport 100`] [`-blocksPerFile 10`] | -|`replication` | [`-datanodes 10`] [`-nodesToDecommission 1`] [`-nodeReplicationLimit 100`] [`-totalBlocks 100`] [`-replication 3`] | +|`create` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-close`] [`-baseDirName /nnThroughputBenchmark`] | +|`mkdirs` | [`-threads 3`] [`-dirs 10`] [`-dirsPerDir 2`] [`-baseDirName /nnThroughputBenchmark`] | +|`open` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] [`-baseDirName /nnThroughputBenchmark`] | +|`delete` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] [`-baseDirName /nnThroughputBenchmark`] | +|`append` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] [`-appendNewBlk`] [`-baseDirName /nnThroughputBenchmark`] | +|`fileStatus` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] [`-baseDirName /nnThroughputBenchmark`] | +|`rename` | [`-threads 3`] [`-files 10`] [`-filesPerDir 4`] [`-useExisting`] [`-baseDirName /nnThroughputBenchmark`] | +|`blockReport` | [`-datanodes 10`] [`-reports 30`] [`-blocksPerReport 100`] [`-blocksPerFile 10`] [`-baseDirName /nnThroughputBenchmark`] | +|`replication` | [`-datanodes 10`] [`-nodesToDecommission 1`] [`-nodeReplicationLimit 100`] [`-totalBlocks 100`] [`-replication 3`] [`-baseDirName /nnThroughputBenchmark`] | |`clean` | N/A | # Operation Options @@ -86,6 +86,7 @@ When running benchmarks with the above operation(s), please provide operation-sp |`-nodeReplicationLimit` | The maximum number of outgoing replication streams for a data-node. | |`-totalBlocks` | Number of total blocks to operate. | |`-replication` | Replication factor. Will be adjusted to number of data-nodes if it is larger than that. | +|`-baseDirName` | The base dir name for benchmarks, to support multiple clients submitting benchmark tests at the same time. 
| ### Reports diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java index 847f7dc0c12c..a4e88d759fb4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java @@ -162,11 +162,11 @@ public class NNThroughputBenchmark implements Tool { * specific name-node operation. */ abstract class OperationStatsBase { -protected static final String BASE_DIR_NAME = "/nnThroughputBenchmark"; +private String baseDirName = "/nnThroughputBenchmark"; protected static final String OP_ALL_NAME = &
(hadoop) branch trunk updated: HDFS-17279. RBF: Fix link to Fedbalance document (#6333). Contributed by Haiyang Hu.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new f96361fd5448 HDFS-17279. RBF: Fix link to Fedbalance document (#6333). Contributed by Haiyang Hu. f96361fd5448 is described below commit f96361fd544810a501bba69bd126b806313d33d0 Author: huhaiyang AuthorDate: Sat Dec 9 05:02:04 2023 +0800 HDFS-17279. RBF: Fix link to Fedbalance document (#6333). Contributed by Haiyang Hu. Reviewed-by: Inigo Goiri Signed-off-by: Ayush Saxena --- .../hadoop-hdfs-rbf/src/site/markdown/HDFSRouterFederation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/site/markdown/HDFSRouterFederation.md b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/site/markdown/HDFSRouterFederation.md index 098c73a3b71d..9d565f3c4248 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/site/markdown/HDFSRouterFederation.md +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/site/markdown/HDFSRouterFederation.md @@ -539,7 +539,7 @@ More metrics info can see [RBF Metrics](../../hadoop-project-dist/hadoop-common/ Router Federation Rename --- -Enable Router to rename across namespaces. Currently it is implemented based on [HDFS Federation Balance](../../hadoop-federation-balance/HDFSFederationBalance.md) and has some limits comparing with normal rename. +Enable Router to rename across namespaces. Currently it is implemented based on [HDFS Federation Balance](../../hadoop-federation-balance/HDFSFederationBalance.html) and has some limits comparing with normal rename. 1. It is much slower than the normal rename so need a longer RPC timeout configuration. See `ipc.client.rpc-timeout.ms` and its description for more information about RPC timeout. 2. It doesn't support snapshot path. 3. It doesn't support to rename path with multiple destinations. 
- To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.3 updated: HADOOP-18924. Upgrade to grpc 1.53.0 due to CVEs (#6161). Contributed by PJ Fanning.. (#6313)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new fad316a83cae HADOOP-18924. Upgrade to grpc 1.53.0 due to CVEs (#6161). Contributed by PJ Fanning.. (#6313) fad316a83cae is described below commit fad316a83caeca88eeef4375f3af9abf69fe54f2 Author: PJ Fanning AuthorDate: Tue Dec 5 20:17:21 2023 +0100 HADOOP-18924. Upgrade to grpc 1.53.0 due to CVEs (#6161). Contributed by PJ Fanning.. (#6313) --- LICENSE-binary | 16 .../hadoop-yarn/hadoop-yarn-csi/pom.xml| 22 -- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index d84f311ce190..1c734cbe9641 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -255,13 +255,13 @@ commons-logging:commons-logging:1.1.3 commons-net:commons-net:3.9.0 de.ruedigermoeller:fst:2.50 io.dropwizard.metrics:metrics-core:3.2.4 -io.grpc:grpc-api:1.26.0 -io.grpc:grpc-context:1.26.0 -io.grpc:grpc-core:1.26.0 -io.grpc:grpc-netty:1.26.0 -io.grpc:grpc-protobuf:1.26.0 -io.grpc:grpc-protobuf-lite:1.26.0 -io.grpc:grpc-stub:1.26.0 +io.grpc:grpc-api:1.53.0 +io.grpc:grpc-context:1.53.0 +io.grpc:grpc-core:1.53.0 +io.grpc:grpc-netty:1.53.0 +io.grpc:grpc-protobuf:1.53.0 +io.grpc:grpc-protobuf-lite:1.53.0 +io.grpc:grpc-stub:1.53.0 io.netty:netty:3.10.6.Final io.netty:netty-all:4.1.100.Final io.netty:netty-buffer:4.1.100.Final @@ -446,7 +446,7 @@ org.bouncycastle:bcpkix-jdk15on:1.68 org.bouncycastle:bcprov-jdk15on:1.68 org.checkerframework:checker-qual:2.5.2 org.checkerframework:checker-qual:3.8.0 -org.codehaus.mojo:animal-sniffer-annotations:1.17 +org.codehaus.mojo:animal-sniffer-annotations:1.21 org.jruby.jcodings:jcodings:1.0.13 org.jruby.joni:joni:2.1.2 org.ojalgo:ojalgo:43.0 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml index 
be0e1684cbd3..1fad432ca094 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml @@ -26,7 +26,8 @@ jar -1.26.0 +1.53.0 +1.21 @@ -48,6 +49,17 @@ io.grpc grpc-core ${grpc.version} + + +org.codehaus.mojo +animal-sniffer-annotations + + + + +org.codehaus.mojo +animal-sniffer-annotations +${animal-sniffer.version} io.grpc @@ -65,6 +77,12 @@ io.grpc grpc-stub ${grpc.version} + + +org.codehaus.mojo +animal-sniffer-annotations + + io.grpc @@ -187,7 +205,7 @@ com.google.protobuf:protoc:${hadoop.protobuf.version}:exe:${os.detected.classifier} grpc-java - io.grpc:protoc-gen-grpc-java:1.26.0:exe:${os.detected.classifier} + io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier} - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (d0b460f27008 -> 2323ad24a206)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from d0b460f27008 HDFS-17233. The conf dfs.datanode.lifeline.interval.seconds is not considering time unit seconds (#6286). Contributed by Palakur Eshwitha Sai. add 2323ad24a206 HDFS-17260. Fix the logic for reconfigure slow peer enable for Namenode. (#6279). Contributed by huangzhaobo99. No new revisions were added by this update. Summary of changes: .../server/blockmanagement/DatanodeManager.java| 1 + .../blockmanagement/SlowPeerDisabledTracker.java | 6 .../TestReplicationPolicyExcludeSlowNodes.java | 40 ++ 3 files changed, 41 insertions(+), 6 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (e76477e823d4 -> d0b460f27008)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from e76477e823d4 HDFS-17271. Fix dead DN sorting in web UI (#6318). Contributed by Felix N. add d0b460f27008 HDFS-17233. The conf dfs.datanode.lifeline.interval.seconds is not considering time unit seconds (#6286). Contributed by Palakur Eshwitha Sai. No new revisions were added by this update. Summary of changes: .../main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java | 8 1 file changed, 4 insertions(+), 4 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HDFS-17271. Fix dead DN sorting in web UI (#6318). Contributed by Felix N.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new e76477e823d4 HDFS-17271. Fix dead DN sorting in web UI (#6318). Contributed by Felix N. e76477e823d4 is described below commit e76477e823d4fd50068b30fc07b50aa6fd01773f Author: Felix Nguyen <23214709+kokonguyen...@users.noreply.github.com> AuthorDate: Sat Dec 2 16:07:22 2023 +0800 HDFS-17271. Fix dead DN sorting in web UI (#6318). Contributed by Felix N. Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- .../hadoop-hdfs-rbf/src/main/webapps/router/federationhealth.html | 2 +- hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html| 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/webapps/router/federationhealth.html b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/webapps/router/federationhealth.html index af2f8854fbea..9367b2e99c11 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/webapps/router/federationhealth.html +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/webapps/router/federationhealth.html @@ -383,7 +383,7 @@ {state} {location}/{name} ({xferaddr}) -{#helper_relative_time value="{lastContact}"/} +{#helper_relative_time value="{lastContact}"/} diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html index 9473c7ed3d77..200f5c730fce 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html @@ -356,7 +356,7 @@ {state} {location}/{name} ({xferaddr}) -{#helper_relative_time value="{lastContact}"/} +{#helper_relative_time value="{lastContact}"/} - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, 
e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (89e78a76a0c3 -> 071f850841a6)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 89e78a76a0c3 HDFS-17259. Fix typo in TestFsDatasetImpl Class. (#6278). Contributed by huangzhaobo99. add 071f850841a6 HDFS-17261. RBF: Fix getFileInfo return wrong path when get mountTable path which is multi-level (#6288). Contributed by liuguanghua. No new revisions were added by this update. Summary of changes: .../federation/router/RouterClientProtocol.java| 32 +- .../federation/router/TestRouterMountTable.java| 23 2 files changed, 48 insertions(+), 7 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HDFS-17259. Fix typo in TestFsDatasetImpl Class. (#6278). Contributed by huangzhaobo99.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 89e78a76a0c3 HDFS-17259. Fix typo in TestFsDatasetImpl Class. (#6278). Contributed by huangzhaobo99. 89e78a76a0c3 is described below commit 89e78a76a0c3d50b5dd46350392bfeef638b72d4 Author: huangzhaobo AuthorDate: Fri Dec 1 13:16:31 2023 +0800 HDFS-17259. Fix typo in TestFsDatasetImpl Class. (#6278). Contributed by huangzhaobo99. --- .../hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java| 8 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java index d49198b53dd5..2f068a6a69c6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java @@ -1713,9 +1713,9 @@ public class TestFsDatasetImpl { @Test public void testNotifyNamenodeMissingOrNewBlock() throws Exception { long blockSize = 1024; -int heatbeatInterval = 1; +int heartbeatInterval = 1; HdfsConfiguration c = new HdfsConfiguration(); -c.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, heatbeatInterval); +c.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, heartbeatInterval); c.setLong(DFS_BLOCK_SIZE_KEY, blockSize); MiniDFSCluster cluster = new MiniDFSCluster.Builder(c). 
numDataNodes(1).build(); @@ -1964,9 +1964,9 @@ public class TestFsDatasetImpl { @Test public void tesInvalidateMissingBlock() throws Exception { long blockSize = 1024; -int heatbeatInterval = 1; +int heartbeatInterval = 1; HdfsConfiguration c = new HdfsConfiguration(); -c.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, heatbeatInterval); +c.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, heartbeatInterval); c.setLong(DFS_BLOCK_SIZE_KEY, blockSize); MiniDFSCluster cluster = new MiniDFSCluster.Builder(c). numDataNodes(1).build(); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-18924. Upgrade to grpc 1.53.0 due to CVEs (#6161). Contributed by PJ Fanning.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 3cb3dfafe508 HADOOP-18924. Upgrade to grpc 1.53.0 due to CVEs (#6161). Contributed by PJ Fanning. 3cb3dfafe508 is described below commit 3cb3dfafe508d3c505851749852578dece2d6525 Author: PJ Fanning AuthorDate: Fri Dec 1 05:23:47 2023 +0100 HADOOP-18924. Upgrade to grpc 1.53.0 due to CVEs (#6161). Contributed by PJ Fanning. Signed-off-by: Ayush Saxena --- LICENSE-binary | 16 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml | 16 ++-- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 90afc172e2d7..1742e787a66b 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -250,13 +250,13 @@ commons-daemon:commons-daemon:1.0.13 commons-io:commons-io:2.14.0 commons-net:commons-net:3.9.0 de.ruedigermoeller:fst:2.50 -io.grpc:grpc-api:1.26.0 -io.grpc:grpc-context:1.26.0 -io.grpc:grpc-core:1.26.0 -io.grpc:grpc-netty:1.26.0 -io.grpc:grpc-protobuf:1.26.0 -io.grpc:grpc-protobuf-lite:1.26.0 -io.grpc:grpc-stub:1.26.0 +io.grpc:grpc-api:1.53.0 +io.grpc:grpc-context:1.53.0 +io.grpc:grpc-core:1.53.0 +io.grpc:grpc-netty:1.53.0 +io.grpc:grpc-protobuf:1.53.0 +io.grpc:grpc-protobuf-lite:1.53.0 +io.grpc:grpc-stub:1.53.0 io.netty:netty-all:4.1.100.Final io.netty:netty-buffer:4.1.100.Final io.netty:netty-codec:4.1.100.Final @@ -482,7 +482,7 @@ com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre7 org.bouncycastle:bcpkix-jdk15on:1.68 org.bouncycastle:bcprov-jdk15on:1.68 org.checkerframework:checker-qual:2.5.2 -org.codehaus.mojo:animal-sniffer-annotations:1.17 +org.codehaus.mojo:animal-sniffer-annotations:1.21 org.jruby.jcodings:jcodings:1.0.13 org.jruby.joni:joni:2.1.2 org.slf4j:jul-to-slf4j:jar:1.7.25 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml index 864067ce9746..38bc9085abf2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/pom.xml @@ -26,7 +26,8 @@ jar -1.26.0 +1.53.0 +1.21 @@ -48,6 +49,17 @@ io.grpc grpc-core ${grpc.version} + + +org.codehaus.mojo +animal-sniffer-annotations + + + + +org.codehaus.mojo +animal-sniffer-annotations +${animal-sniffer.version} io.grpc @@ -197,7 +209,7 @@ com.google.protobuf:protoc:${hadoop.protobuf.version}:exe:${os.detected.classifier} grpc-java - io.grpc:protoc-gen-grpc-java:1.26.0:exe:${os.detected.classifier} + io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier} - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop-site) branch asf-site updated: Improve description for security mailing lists. (#48)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch asf-site in repository https://gitbox.apache.org/repos/asf/hadoop-site.git The following commit(s) were added to refs/heads/asf-site by this push: new 9cc1d23afc Improve description for security mailing lists. (#48) 9cc1d23afc is described below commit 9cc1d23afc65a06d85ac15b4cd952218596d6efa Author: Ayush Saxena AuthorDate: Mon Nov 27 20:33:39 2023 +0530 Improve description for security mailing lists. (#48) --- content/mailing_lists.html | 8 src/mailing_lists.md | 9 + 2 files changed, 17 insertions(+) diff --git a/content/mailing_lists.html b/content/mailing_lists.html index 0ed3ec503a..646536e8ae 100644 --- a/content/mailing_lists.html +++ b/content/mailing_lists.html @@ -184,6 +184,14 @@ The Hadoop security mailing list is : mailto:secur...@hadoop.apache.org In order to post to the list, it is NOT necessary to first subscribe to it. For information on published vulnerabilities please see our CVE list. +This mailing list is only for discussing security vulnerabilities in hadoop source code, NOT security advisories for thirdparty libraries. For security issues related to thirdparty libraries use the dev/user mailing lists. +However, when after analysis it turns out the advisory impacts Hadoop, that should be discussed on the security list. +The thirdparty library versions in the upcoming releases can be checked here: + +https://github.com/apache/hadoop/blob/trunk/LICENSE-binary;>3.4.x +https://github.com/apache/hadoop/blob/branch-3.3/LICENSE-binary;>3.3.x + +Note: Not all vulnerabilities coming from thirdparty libraries impact hadoop and it isnt possible to update every thirdparty library. Read a hadoop developers point of view on upgrading thirdparty libraries https://s.apache.org/transitive-issues;>here General This mailing list is NOT for end-user questions and discussion. Please use the user mailing list for such issues. 
diff --git a/src/mailing_lists.md b/src/mailing_lists.md index 7ead9dad8d..bffeee6b25 100644 --- a/src/mailing_lists.md +++ b/src/mailing_lists.md @@ -55,6 +55,15 @@ to it. For information on published vulnerabilities please see our [CVE list](cve_list.html). +This mailing list is only for discussing security vulnerabilities in hadoop 'source' code, **NOT** security advisories for thirdparty libraries. For security issues related to thirdparty libraries use the dev/user mailing lists. +However, when after analysis it turns out the advisory impacts Hadoop, that should be discussed on the security list. + +The thirdparty library versions in the upcoming releases can be checked here: +* [3.4.x](https://github.com/apache/hadoop/blob/trunk/LICENSE-binary) +* [3.3.x](https://github.com/apache/hadoop/blob/branch-3.3/LICENSE-binary) + +**Note:** Not all vulnerabilities coming from thirdparty libraries impact hadoop and it isn't possible to update every thirdparty library. Read a hadoop developer's point of view on upgrading thirdparty libraries [here](https://s.apache.org/transitive-issues) + ## General This mailing list is **NOT** for end-user questions and discussion. - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated (616e381c9fb9 -> f609460bda0c)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 616e381c9fb9 YARN-11577. Improve FederationInterceptorREST Method Result. (#6190) Contributed by Shilun Fan. add f609460bda0c HADOOP-18957. Use StandardCharsets.UTF_8 (#6231). Contributed by PJ Fanning. No new revisions were added by this update. Summary of changes: .../authentication/examples/WhoClient.java | 4 +- .../server/PseudoAuthenticationHandler.java| 6 +- .../util/StringSignerSecretProvider.java | 4 +- .../util/TestZKSignerSecretProvider.java | 10 ++-- .../java/org/apache/hadoop/conf/Configuration.java | 4 +- .../main/java/org/apache/hadoop/fs/FileUtil.java | 4 +- .../fs/impl/FileSystemMultipartUploader.java | 14 ++--- .../org/apache/hadoop/fs/shell/CopyCommands.java | 3 +- .../java/org/apache/hadoop/http/HtmlQuoting.java | 2 +- .../org/apache/hadoop/io/DefaultStringifier.java | 13 ++-- .../java/org/apache/hadoop/io/WritableUtils.java | 7 ++- .../main/java/org/apache/hadoop/log/LogLevel.java | 4 +- .../apache/hadoop/metrics2/impl/MetricsConfig.java | 4 +- .../apache/hadoop/security/SaslPlainServer.java| 3 +- .../alias/AbstractJavaKeyStoreProvider.java| 6 +- .../apache/hadoop/security/alias/UserProvider.java | 2 +- .../web/DelegationTokenAuthenticationFilter.java | 6 +- .../token/delegation/web/ServletUtils.java | 5 +- .../java/org/apache/hadoop/util/SysInfoLinux.java | 14 ++--- .../main/java/org/apache/hadoop/util/ZKUtil.java | 6 +- .../hadoop/util/curator/ZKCuratorManager.java | 8 +-- .../hadoop/conf/TestCommonConfigurationFields.java | 2 +- .../java/org/apache/hadoop/fs/TestFileUtil.java| 4 +- .../apache/hadoop/fs/TestHarFileSystemBasics.java | 3 +- .../AbstractContractMultipartUploaderTest.java | 8 +-- .../hadoop/fs/contract/ContractTestUtils.java | 3 +- .../java/org/apache/hadoop/ha/TestHAAdmin.java | 6 +- .../apache/hadoop/http/TestIsActiveServlet.java| 3 +- 
.../org/apache/hadoop/io/TestSecureIOUtils.java| 3 +- .../test/java/org/apache/hadoop/io/TestText.java | 6 +- .../test/java/org/apache/hadoop/io/TestUTF8.java | 5 +- .../apache/hadoop/metrics2/sink/TestFileSink.java | 3 +- .../hadoop/metrics2/sink/TestStatsDMetrics.java| 6 +- .../org/apache/hadoop/net/TestTableMapping.java| 12 ++-- .../TestZKDelegationTokenSecretManager.java| 3 +- .../java/org/apache/hadoop/util/TestClasspath.java | 3 +- .../org/apache/hadoop/util/TestPureJavaCrc32.java | 3 +- .../java/org/apache/hadoop/util/TestZKUtil.java| 4 +- .../hadoop/util/curator/TestZKCuratorManager.java | 7 ++- .../crypto/key/kms/server/KMSJSONWriter.java | 5 +- .../registry/client/binding/JsonSerDeser.java | 4 +- .../registry/client/impl/zk/RegistrySecurity.java | 3 +- .../hadoop/registry/server/dns/RegistryDNS.java| 3 +- .../datatransfer/sasl/DataTransferSaslUtil.java| 4 +- .../datatransfer/sasl/SaslDataTransferClient.java | 10 ++-- .../hadoop/hdfs/util/CombinedHostsFileReader.java | 5 +- .../hadoop/hdfs/util/CombinedHostsFileWriter.java | 3 +- .../apache/hadoop/hdfs/web/WebHdfsFileSystem.java | 3 +- .../hadoop/hdfs/web/TestWebHdfsContentLength.java | 3 +- .../hadoop/fs/http/client/HttpFSFileSystem.java| 6 +- .../apache/hadoop/fs/http/server/HttpFSServer.java | 4 +- .../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java | 14 ++--- .../datatransfer/sasl/SaslDataTransferServer.java | 6 +- .../hadoop/hdfs/qjournal/server/Journal.java | 4 +- .../token/block/BlockTokenSecretManager.java | 4 +- .../apache/hadoop/hdfs/server/common/Storage.java | 4 +- .../hadoop/hdfs/server/datanode/DiskBalancer.java | 4 +- .../datanode/fsdataset/impl/PmemVolumeManager.java | 3 +- .../datanode/web/webhdfs/ExceptionHandler.java | 4 +- .../hadoop/hdfs/server/namenode/FSNamesystem.java | 6 +- .../web/resources/NamenodeWebHdfsMethods.java | 6 +- .../offlineEditsViewer/OfflineEditsXmlLoader.java | 4 +- .../offlineEditsViewer/StatisticsEditsVisitor.java | 4 +- .../tools/offlineImageViewer/FSImageHandler.java | 
6 +- .../offlineImageViewer/ImageLoaderCurrent.java | 3 +- .../OfflineImageReconstructor.java | 4 +- .../offlineImageViewer/PBImageTextWriter.java | 13 ++-- .../offlineImageViewer/TextWriterImageVisitor.java | 4 +- .../org/apache/hadoop/hdfs/util/MD5FileUtils.java | 6 +- .../hadoop/hdfs/util/PersistentLongFile.java | 7 +-- .../java/org/apache/hadoop/hdfs/DFSTestUtil.java | 6 +- .../apache/hadoop/hdfs/TestBalancerBandwidth.java | 3 +- .../org/apache/hadoop/hdfs/TestDFSRollback.java| 6 +- .../org/apache/hadoop/hdfs/TestDFSUpgrade.java | 6 +- .../org/apache/hadoop/hdfs/TestDatanodeReport.java | 2 +- .../apache
(hadoop) branch trunk updated: HADOOP-18964. Update plugin for SBOM generation to 2.7.10 (#6235). Contributed by Vinod Anandan.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 2fa7d4fe8600 HADOOP-18964. Update plugin for SBOM generation to 2.7.10 (#6235). Contributed by Vinod Anandan. 2fa7d4fe8600 is described below commit 2fa7d4fe8600b3d1b735734cc557757afa70a73b Author: Vinod Anandan AuthorDate: Wed Nov 15 16:27:59 2023 +0200 HADOOP-18964. Update plugin for SBOM generation to 2.7.10 (#6235). Contributed by Vinod Anandan. Signed-off-by: Ayush Saxena --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index b86af01172f4..0c175d8da03c 100644 --- a/pom.xml +++ b/pom.xml @@ -118,7 +118,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x 4.2.0 1.1.1 3.10.1 -2.7.6 +2.7.10 bash - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: MAPREDUCE-7459. Fixed TestHistoryViewerPrinter flakiness during string comparison (#6215). Contributed by Rajiv Ramachandran.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new d6bb47e5db2f MAPREDUCE-7459. Fixed TestHistoryViewerPrinter flakiness during string comparison (#6215). Contributed by Rajiv Ramachandran. d6bb47e5db2f is described below commit d6bb47e5db2fac8236b23ad30389b5ae69a5c2bd Author: rRajivramachandran AuthorDate: Fri Nov 3 20:53:50 2023 -0500 MAPREDUCE-7459. Fixed TestHistoryViewerPrinter flakiness during string comparison (#6215). Contributed by Rajiv Ramachandran. Reviewed-by: Inigo Goiri Signed-off-by: Ayush Saxena --- .../mapreduce/jobhistory/TestHistoryViewerPrinter.java | 14 -- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestHistoryViewerPrinter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestHistoryViewerPrinter.java index cb508f6d29b9..3a1408bdff9d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestHistoryViewerPrinter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestHistoryViewerPrinter.java @@ -24,6 +24,7 @@ import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.TaskType; +import org.assertj.core.api.Assertions; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -35,7 +36,9 @@ import org.slf4j.LoggerFactory; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import 
java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.TimeZone; import java.util.Locale; @@ -160,6 +163,13 @@ public class TestHistoryViewerPrinter { LINE_SEPARATOR, outStr); } + private static void assertEqualLines(String str1, String str2) { +final List linesFromStr1 = Arrays.asList(str1.trim().split("\n")); +final List linesFromStr2 = Arrays.asList(str2.trim().split("\n")); + + Assertions.assertThat(linesFromStr1).containsExactlyInAnyOrderElementsOf(linesFromStr2); + } + @Test public void testHumanPrinterAll() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); @@ -168,7 +178,7 @@ public class TestHistoryViewerPrinter { TimeZone.getTimeZone("GMT")); String outStr = run(printer); if (System.getProperty("java.version").startsWith("1.7")) { - Assert.assertEquals("\n" + + assertEqualLines("\n" + "Hadoop job: job_1317928501754_0001\n" + "=\n" + "User: rkanter\n" + @@ -356,7 +366,7 @@ public class TestHistoryViewerPrinter { "localhost\ttask_1317928501754_0001_m_02, " + LINE_SEPARATOR, outStr); } else { - Assert.assertEquals("\n" + + assertEqualLines("\n" + "Hadoop job: job_1317928501754_0001\n" + "=\n" + "User: rkanter\n" + - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-18963. Fix typos in .gitignore (#6243)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 5b215f23d562 HADOOP-18963. Fix typos in .gitignore (#6243) 5b215f23d562 is described below commit 5b215f23d562e63c8b522d39852ef8d51f0ebdeb Author: YuanHanzhong AuthorDate: Sat Nov 4 07:42:39 2023 +0800 HADOOP-18963. Fix typos in .gitignore (#6243) --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 166954793dd0..84d9572cbb5e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -.DS_Store +*.DS_Store *.iml *.ipr *.iws - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.3 updated: HADOOP-18936. Upgrade to jetty 9.4.53 (#6181). Contributed by PJ Fanning. (#6239)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new b5276af574f8 HADOOP-18936. Upgrade to jetty 9.4.53 (#6181). Contributed by PJ Fanning. (#6239) b5276af574f8 is described below commit b5276af574f8b7de34fb99da4ad4dff3a0eeb508 Author: PJ Fanning AuthorDate: Wed Nov 1 10:00:00 2023 + HADOOP-18936. Upgrade to jetty 9.4.53 (#6181). Contributed by PJ Fanning. (#6239) Signed-off-by: Ayush Saxena --- LICENSE-binary | 28 ++-- hadoop-project/pom.xml | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index d23783f8408f..68105f4cbe92 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -343,20 +343,20 @@ org.apache.kerby:token-provider:1.0.1 org.apache.yetus:audience-annotations:0.5.0 org.apache.zookeeper:zookeeper:3.7.2 org.codehaus.jettison:jettison:1.5.4 -org.eclipse.jetty:jetty-annotations:9.4.51.v20230217 -org.eclipse.jetty:jetty-http:9.4.51.v20230217 -org.eclipse.jetty:jetty-io:9.4.51.v20230217 -org.eclipse.jetty:jetty-jndi:9.4.51.v20230217 -org.eclipse.jetty:jetty-plus:9.4.51.v20230217 -org.eclipse.jetty:jetty-security:9.4.51.v20230217 -org.eclipse.jetty:jetty-server:9.4.51.v20230217 -org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 -org.eclipse.jetty:jetty-util:9.4.51.v20230217 -org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 -org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 -org.eclipse.jetty:jetty-xml:9.4.51.v20230217 -org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.51.v20230217 -org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.51.v20230217 +org.eclipse.jetty:jetty-annotations:9.4.53.v20231009 +org.eclipse.jetty:jetty-http:9.4.53.v20231009 +org.eclipse.jetty:jetty-io:9.4.53.v20231009 +org.eclipse.jetty:jetty-jndi:9.4.53.v20231009 +org.eclipse.jetty:jetty-plus:9.4.53.v20231009 
+org.eclipse.jetty:jetty-security:9.4.53.v20231009 +org.eclipse.jetty:jetty-server:9.4.53.v20231009 +org.eclipse.jetty:jetty-servlet:9.4.53.v20231009 +org.eclipse.jetty:jetty-util:9.4.53.v20231009 +org.eclipse.jetty:jetty-util-ajax:9.4.53.v20231009 +org.eclipse.jetty:jetty-webapp:9.4.53.v20231009 +org.eclipse.jetty:jetty-xml:9.4.53.v20231009 +org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.53.v20231009 +org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.53.v20231009 org.ehcache:ehcache:3.3.1 org.ini4j:ini4j:0.5.4 org.lz4:lz4-java:1.7.1 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 210fd471bdde..f89df0e6ff69 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -37,7 +37,7 @@ true true -9.4.51.v20230217 +9.4.53.v20231009 _ _ - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch branch-3.3 updated: HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133) (#6151). Contributed by PJ Fanning.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new 09c4f50364ae HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133) (#6151). Contributed by PJ Fanning. 09c4f50364ae is described below commit 09c4f50364aed25c81addcca4dc58dfa03e4214c Author: PJ Fanning AuthorDate: Mon Oct 30 04:05:38 2023 + HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133) (#6151). Contributed by PJ Fanning. --- LICENSE-binary | 2 +- .../common/HostRestrictingAuthorizationFilter.java | 17 ++--- .../hdfs/server/namenode/snapshot/TestSnapshot.java | 2 +- .../offlineImageViewer/TestOfflineImageViewer.java | 2 +- hadoop-project/pom.xml | 2 +- .../java/org/apache/hadoop/tools/HadoopArchiveLogs.java | 6 +- .../services/TestTextFileBasedIdentityHandler.java | 6 +++--- .../hadoop/yarn/logaggregation/AggregatedLogFormat.java | 6 +- .../yarn/server/timeline/TestLeveldbTimelineStore.java | 5 +++-- .../timeline/TestRollingLevelDBTimelineStore.java | 5 +++-- 10 files changed, 29 insertions(+), 24 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 5a3f5f12e9c9..d23783f8408f 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -250,7 +250,7 @@ commons-cli:commons-cli:1.2 commons-codec:commons-codec:1.11 commons-collections:commons-collections:3.2.2 commons-daemon:commons-daemon:1.0.13 -commons-io:commons-io:2.8.0 +commons-io:commons-io:2.14.0 commons-logging:commons-logging:1.1.3 commons-net:commons-net:3.9.0 de.ruedigermoeller:fst:2.50 diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HostRestrictingAuthorizationFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HostRestrictingAuthorizationFilter.java index 0eb999039cb8..afed1e9e6e72 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HostRestrictingAuthorizationFilter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HostRestrictingAuthorizationFilter.java @@ -117,17 +117,12 @@ public class HostRestrictingAuthorizationFilter implements Filter { String rulePath = rule.getPath(); LOG.trace("Evaluating rule, subnet: {}, path: {}", subnet != null ? subnet.getCidrSignature() : "*", rulePath); - try { -if ((subnet == null || subnet.isInRange(remoteIp)) -&& FilenameUtils.directoryContains(rulePath, path)) { - LOG.debug("Found matching rule, subnet: {}, path: {}; returned true", - rule.getSubnet() != null ? subnet.getCidrSignature() : null, - rulePath); - return true; -} - } catch (IOException e) { -LOG.warn("Got IOException {}; returned false", e); -return false; + if ((subnet == null || subnet.isInRange(remoteIp)) + && FilenameUtils.directoryContains(rulePath, path)) { +LOG.debug("Found matching rule, subnet: {}, path: {}; returned true", +rule.getSubnet() != null ? 
subnet.getCidrSignature() : null, +rulePath); +return true; } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java index adfbbf16908c..3d4c29067247 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java @@ -257,7 +257,7 @@ public class TestSnapshot { FSImageTestUtil.getFSImage( cluster.getNameNode()).getStorage().getStorageDir(0)); assertNotNull("Didn't generate or can't find fsimage", originalFsimage); -PrintStream o = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM); +PrintStream o = new PrintStream(NullOutputStream.INSTANCE); PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), o); v.visit(new RandomAccessFile(originalFsimage, "r")); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java index 768b59b96626..dbd5309cd6a0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java +++ b/hadoop-
(hadoop) branch trunk updated: HADOOP-18917. Addendum. Fix deprecation issues after commons-io upgrade. (#6228). Contributed by PJ Fanning.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new a079f6261d77 HADOOP-18917. Addendum. Fix deprecation issues after commons-io upgrade. (#6228). Contributed by PJ Fanning. a079f6261d77 is described below commit a079f6261d77512a4eeb9a1d10e667caaecde29c Author: PJ Fanning AuthorDate: Mon Oct 30 04:05:02 2023 + HADOOP-18917. Addendum. Fix deprecation issues after commons-io upgrade. (#6228). Contributed by PJ Fanning. --- .../apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java | 2 +- .../hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java | 2 +- .../src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java| 6 +- .../org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java | 6 +- .../hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java | 5 +++-- .../yarn/server/timeline/TestRollingLevelDBTimelineStore.java | 5 +++-- 6 files changed, 18 insertions(+), 8 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java index 107333c5a63c..3a3898727fc1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java @@ -257,7 +257,7 @@ public class TestSnapshot { FSImageTestUtil.getFSImage( cluster.getNameNode()).getStorage().getStorageDir(0)); assertNotNull("Didn't generate or can't find fsimage", originalFsimage); -PrintStream o = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM); +PrintStream o = new PrintStream(NullOutputStream.INSTANCE); PBImageXmlWriter v = new PBImageXmlWriter(new 
Configuration(), o); v.visit(new RandomAccessFile(originalFsimage, "r")); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java index c24c9132cbcd..b2112a74e855 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java @@ -405,7 +405,7 @@ public class TestOfflineImageViewer { @Test(expected = IOException.class) public void testTruncatedFSImage() throws IOException { File truncatedFile = new File(tempDir, "truncatedFsImage"); -PrintStream output = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM); +PrintStream output = new PrintStream(NullOutputStream.INSTANCE); copyPartOfFile(originalFsimage, truncatedFile); try (RandomAccessFile r = new RandomAccessFile(truncatedFile, "r")) { new FileDistributionCalculator(new Configuration(), 0, 0, false, output) diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java index f745a2e519e3..9b28ca406d69 100644 --- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java @@ -54,6 +54,7 @@ import org.apache.hadoop.yarn.util.ConverterUtils; import java.io.File; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -505,7 +506,10 @@ public class HadoopArchiveLogs implements Tool { String classpath = halrJarPath + File.pathSeparator + harJarPath; 
FileWriterWithEncoding fw = null; try { - fw = new FileWriterWithEncoding(localScript, "UTF-8"); + fw = FileWriterWithEncoding.builder() + .setFile(localScript) + .setCharset(StandardCharsets.UTF_8) + .get(); fw.write("#!/bin/bash\nset -e\nset -x\n"); int containerCount = 1; for (AppInfo context : eligibleApplications) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java index 477a8a293ceb..26c3e01a45d0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yar
(hadoop) branch trunk updated: HADOOP-18905. Negative timeout in ZKFailovercontroller due to overflow. (#6092). Contributed by ConfX.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 7c6af6a5f626 HADOOP-18905. Negative timeout in ZKFailovercontroller due to overflow. (#6092). Contributed by ConfX. 7c6af6a5f626 is described below commit 7c6af6a5f626d18d68b656d085cc23e4c1f7a1ef Author: ConfX <114765570+teamco...@users.noreply.github.com> AuthorDate: Sun Oct 29 16:00:28 2023 +0800 HADOOP-18905. Negative timeout in ZKFailovercontroller due to overflow. (#6092). Contributed by ConfX. Reviewed-by: Inigo Goiri Signed-off-by: Ayush Saxena --- .../src/main/java/org/apache/hadoop/ha/ZKFailoverController.java | 1 + 1 file changed, 1 insertion(+) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java index 487d7b940915..91f720a49eed 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java @@ -660,6 +660,7 @@ public abstract class ZKFailoverController { private void doGracefulFailover() throws ServiceFailedException, IOException, InterruptedException { int timeout = FailoverController.getGracefulFenceTimeout(conf) * 2; +Preconditions.checkArgument(timeout >= 0, "timeout should be non-negative."); // Phase 1: pre-flight checks checkEligibleForFailover(); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
(hadoop) branch trunk updated: HADOOP-18936. Upgrade to jetty 9.4.53 (#6181). Contributed by PJ Fanning.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new b9c9c42b2911 HADOOP-18936. Upgrade to jetty 9.4.53 (#6181). Contributed by PJ Fanning. b9c9c42b2911 is described below commit b9c9c42b291187cdf7a4f1d2ae959e31367ec95b Author: PJ Fanning AuthorDate: Sun Oct 29 07:39:12 2023 + HADOOP-18936. Upgrade to jetty 9.4.53 (#6181). Contributed by PJ Fanning. Signed-off-by: Ayush Saxena --- LICENSE-binary | 28 ++-- hadoop-project/pom.xml | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index e2f61dc7cd84..3a0e19c5824d 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -339,20 +339,20 @@ org.apache.solr:solr-solrj:8.11.2 org.apache.yetus:audience-annotations:0.5.0 org.apache.zookeeper:zookeeper:3.7.2 org.codehaus.jettison:jettison:1.5.4 -org.eclipse.jetty:jetty-annotations:9.4.51.v20230217 -org.eclipse.jetty:jetty-http:9.4.51.v20230217 -org.eclipse.jetty:jetty-io:9.4.51.v20230217 -org.eclipse.jetty:jetty-jndi:9.4.51.v20230217 -org.eclipse.jetty:jetty-plus:9.4.51.v20230217 -org.eclipse.jetty:jetty-security:9.4.51.v20230217 -org.eclipse.jetty:jetty-server:9.4.51.v20230217 -org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 -org.eclipse.jetty:jetty-util:9.4.51.v20230217 -org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 -org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 -org.eclipse.jetty:jetty-xml:9.4.51.v20230217 -org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.51.v20230217 -org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.51.v20230217 +org.eclipse.jetty:jetty-annotations:9.4.53.v20231009 +org.eclipse.jetty:jetty-http:9.4.53.v20231009 +org.eclipse.jetty:jetty-io:9.4.53.v20231009 +org.eclipse.jetty:jetty-jndi:9.4.53.v20231009 +org.eclipse.jetty:jetty-plus:9.4.53.v20231009 
+org.eclipse.jetty:jetty-security:9.4.53.v20231009 +org.eclipse.jetty:jetty-server:9.4.53.v20231009 +org.eclipse.jetty:jetty-servlet:9.4.53.v20231009 +org.eclipse.jetty:jetty-util:9.4.53.v20231009 +org.eclipse.jetty:jetty-util-ajax:9.4.53.v20231009 +org.eclipse.jetty:jetty-webapp:9.4.53.v20231009 +org.eclipse.jetty:jetty-xml:9.4.53.v20231009 +org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.53.v20231009 +org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.53.v20231009 org.ehcache:ehcache:3.3.1 org.ini4j:ini4j:0.5.4 org.lz4:lz4-java:1.7.1 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 25e48f293a64..5b1c569d21de 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -37,7 +37,7 @@ true true -9.4.51.v20230217 +9.4.53.v20231009 _ _ - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch branch-3.3 updated: HADOOP-18711. upgrade nimbus jwt jar due to issues in its embedded shaded json-smart code. (#5573). Contributed by PJ Fanning. (#6201)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new 61c7b55acb5a HADOOP-18711. upgrade nimbus jwt jar due to issues in its embedded shaded json-smart code. (#5573). Contributed by PJ Fanning. (#6201) 61c7b55acb5a is described below commit 61c7b55acb5aa08038d2e7d7282c86aa5ae025b0 Author: PJ Fanning AuthorDate: Fri Oct 27 04:41:24 2023 +0100 HADOOP-18711. upgrade nimbus jwt jar due to issues in its embedded shaded json-smart code. (#5573). Contributed by PJ Fanning. (#6201) Signed-off-by: Ayush Saxena --- LICENSE-binary | 2 +- hadoop-project/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 09c023f182e8..5a3f5f12e9c9 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -242,7 +242,7 @@ com.google.guava:guava:jar:30.1.1-jre com.google.guava:listenablefuture:.0-empty-to-avoid-conflict-with-guava com.google.j2objc:j2objc-annotations:1.3 com.microsoft.azure:azure-storage:7.0.1 -com.nimbusds:nimbus-jose-jwt:9.8.1 +com.nimbusds:nimbus-jose-jwt:9.31 com.yammer.metrics:metrics-core:2.2.0 com.zaxxer:HikariCP-java7:2.4.12 commons-beanutils:commons-beanutils:1.9.4 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 9af44ff19b56..9097fcca8b4c 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -217,7 +217,7 @@ 8.8.2 1.1.3.Final 5.4.0 -9.8.1 +9.31 v12.22.1 v1.22.5 1.10.13 - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (6e13e4addc14 -> 5eeab5e1b972)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 6e13e4addc14 HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu. add 5eeab5e1b972 HDFS-17235. Fix javadoc errors in BlockManager (#6214). Contributed by Haiyang Hu. No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 6e13e4addc14 HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu. 6e13e4addc14 is described below commit 6e13e4addc14388450cdf6ac6e890d1c95d47e4d Author: jianghuazhu <740087...@qq.com> AuthorDate: Wed Oct 18 07:35:33 2023 +0800 HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu. Reviewed-by: Inigo Goiri Signed-off-by: Ayush Saxena --- .../org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index 783000bbef2f..848da7bd1154 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -150,10 +150,10 @@ import org.slf4j.LoggerFactory; * redundancy. * * For regular replication, # of min live replicas for maintenance is determined - * by DFS_NAMENODE_MAINTENANCE_REPLICATION_MIN_KEY. This number has to = - * DFS_NAMENODE_REPLICATION_MIN_KEY. + * by {@link DFS_NAMENODE_MAINTENANCE_REPLICATION_MIN_KEY}. This number has to = + * {@link DFS_NAMENODE_REPLICATION_MIN_KEY}. * For erasure encoding, # of min live replicas for maintenance is - * BlockInfoStriped#getRealDataBlockNum. + * {@link BlockInfoStriped#getRealDataBlockNum}. * * Another safety property is to satisfy the block placement policy. 
While the * policy is configurable, the replicas the policy is applied to are the live - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: Revert "HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu."
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new fbd653be9bdf Revert "HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu." fbd653be9bdf is described below commit fbd653be9bdffa425c701322eaa9a73b823b282c Author: Ayush Saxena AuthorDate: Mon Oct 23 19:35:12 2023 +0530 Revert "HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu." This reverts commit 81ba2e8484c4315bb9a765374df4bb2a05bc0ebd. --- .../org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index 848da7bd1154..783000bbef2f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -150,10 +150,10 @@ import org.slf4j.LoggerFactory; * redundancy. * * For regular replication, # of min live replicas for maintenance is determined - * by {@link DFS_NAMENODE_MAINTENANCE_REPLICATION_MIN_KEY}. This number has to = - * {@link DFS_NAMENODE_REPLICATION_MIN_KEY}. + * by DFS_NAMENODE_MAINTENANCE_REPLICATION_MIN_KEY. This number has to = + * DFS_NAMENODE_REPLICATION_MIN_KEY. * For erasure encoding, # of min live replicas for maintenance is - * {@link BlockInfoStriped#getRealDataBlockNum}. + * BlockInfoStriped#getRealDataBlockNum. * * Another safety property is to satisfy the block placement policy. 
While the * policy is configurable, the replicas the policy is applied to are the live - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (e0563fed50f4 -> 81ba2e8484c4)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from e0563fed50f4 HADOOP-18908. Improve S3A region handling. (#6187) add 81ba2e8484c4 HDFS-17228. Improve documentation related to BlockManager. (#6195). Contributed by JiangHua Zhu. No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop-site] branch asf-site updated: Add DOAP File
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch asf-site in repository https://gitbox.apache.org/repos/asf/hadoop-site.git The following commit(s) were added to refs/heads/asf-site by this push: new e4501dfb46 Add DOAP File e4501dfb46 is described below commit e4501dfb46d8662fcb3d63289e170ef9c5c5bcbe Author: Ayush Saxena AuthorDate: Mon Oct 16 01:16:05 2023 +0530 Add DOAP File --- content/doap_Hadoop.rdf | 52 + 1 file changed, 52 insertions(+) diff --git a/content/doap_Hadoop.rdf b/content/doap_Hadoop.rdf new file mode 100644 index 00..2b25fa687a --- /dev/null +++ b/content/doap_Hadoop.rdf @@ -0,0 +1,52 @@ + + +http://usefulinc.com/ns/doap#; + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#; + xmlns:asfext="http://projects.apache.org/ns/asfext#; + xmlns:foaf="http://xmlns.com/foaf/0.1/;> + + https://hadoop.apache.org/;> +2023-10-15 +https://spdx.org/licenses/Apache-2.0; /> +Apache Hadoop +https://hadoop.apache.org/; /> +https://hadoop.apache.org; /> +The Apache Hadoop software library is a framework that allows for the distributed processing of large data sets across clusters of computers using simple programming models +The Apache Hadoop software library is a framework that allows for the distributed processing of large data sets across clusters of computers using simple programming models. It is designed to scale up from single servers to thousands of machines, each offering local computation and storage. Rather than rely on hardware to deliver high-availability, the library itself is designed to detect and handle failures at the application layer, so delivering a highly-available serv [...] 
+https://issues.apache.org/jira/projects/HADOOP; /> +https://hadoop.apache.org/mailing_lists.html; /> +https://hadoop.apache.org/releases.html; /> +Java +https://projects.apache.org/category/big-data; /> + + +Stable Release +2023-06-18 +3.3.6 + + + + +https://github.com/apache/hadoop"/> +https://github.com/apache/hadoop"/> + + + + + - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop-site] branch asf-site updated: Add DOAP File
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch asf-site in repository https://gitbox.apache.org/repos/asf/hadoop-site.git The following commit(s) were added to refs/heads/asf-site by this push: new 7cf9f82e00 Add DOAP File 7cf9f82e00 is described below commit 7cf9f82e007935ae273aba88f5a74f3fa2b8ad7c Author: Ayush Saxena AuthorDate: Mon Oct 16 01:07:16 2023 +0530 Add DOAP File --- static/doap_Hadoop.rdf | 52 ++ 1 file changed, 52 insertions(+) diff --git a/static/doap_Hadoop.rdf b/static/doap_Hadoop.rdf new file mode 100644 index 00..2b25fa687a --- /dev/null +++ b/static/doap_Hadoop.rdf @@ -0,0 +1,52 @@ + + +http://usefulinc.com/ns/doap#; + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#; + xmlns:asfext="http://projects.apache.org/ns/asfext#; + xmlns:foaf="http://xmlns.com/foaf/0.1/;> + + https://hadoop.apache.org/;> +2023-10-15 +https://spdx.org/licenses/Apache-2.0; /> +Apache Hadoop +https://hadoop.apache.org/; /> +https://hadoop.apache.org; /> +The Apache Hadoop software library is a framework that allows for the distributed processing of large data sets across clusters of computers using simple programming models +The Apache Hadoop software library is a framework that allows for the distributed processing of large data sets across clusters of computers using simple programming models. It is designed to scale up from single servers to thousands of machines, each offering local computation and storage. Rather than rely on hardware to deliver high-availability, the library itself is designed to detect and handle failures at the application layer, so delivering a highly-available serv [...] 
+https://issues.apache.org/jira/projects/HADOOP; /> +https://hadoop.apache.org/mailing_lists.html; /> +https://hadoop.apache.org/releases.html; /> +Java +https://projects.apache.org/category/big-data; /> + + +Stable Release +2023-06-18 +3.3.6 + + + + +https://github.com/apache/hadoop"/> +https://github.com/apache/hadoop"/> + + + + + - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch branch-3.3 updated: HADOOP-18916. Exclude all module-info classes from uber jars (#6131) (#6188)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new 553b8ff7d4e2 HADOOP-18916. Exclude all module-info classes from uber jars (#6131) (#6188) 553b8ff7d4e2 is described below commit 553b8ff7d4e2ad84a8e99fb183a06788987c759a Author: PJ Fanning AuthorDate: Sat Oct 14 09:59:30 2023 +0100 HADOOP-18916. Exclude all module-info classes from uber jars (#6131) (#6188) Removes java9 and java11 from all modules pulled into the hadoop-client and hadoop-client-minicluster modules. Contributed by PJ Fanning --- hadoop-client-modules/hadoop-client-minicluster/pom.xml | 15 ++- hadoop-client-modules/hadoop-client-runtime/pom.xml | 15 ++- 2 files changed, 4 insertions(+), 26 deletions(-) diff --git a/hadoop-client-modules/hadoop-client-minicluster/pom.xml b/hadoop-client-modules/hadoop-client-minicluster/pom.xml index 15ee04c9d052..67e3a8b1b183 100644 --- a/hadoop-client-modules/hadoop-client-minicluster/pom.xml +++ b/hadoop-client-modules/hadoop-client-minicluster/pom.xml @@ -751,21 +751,10 @@ - com.fasterxml.jackson.*:* - - META-INF/versions/9/module-info.class - - - - com.google.code.gson:gson - - META-INF/versions/9/module-info.class - - - - org.apache.commons:commons-compress + *:* META-INF/versions/9/module-info.class + META-INF/versions/11/module-info.class diff --git a/hadoop-client-modules/hadoop-client-runtime/pom.xml b/hadoop-client-modules/hadoop-client-runtime/pom.xml index c16f2677590d..78153c1f0728 100644 --- a/hadoop-client-modules/hadoop-client-runtime/pom.xml +++ b/hadoop-client-modules/hadoop-client-runtime/pom.xml @@ -245,21 +245,10 @@ - com.fasterxml.jackson.*:* - - META-INF/versions/9/module-info.class - - - - com.google.code.gson:gson - - META-INF/versions/9/module-info.class - - - - org.apache.commons:commons-compress + *:* 
META-INF/versions/9/module-info.class + META-INF/versions/11/module-info.class - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch branch-3.3 updated: HADOOP-18923. Switch to SPDX identifier for license name (#6149). Contributed by Colm O hEigeartaigh.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new 6f682195ba56 HADOOP-18923. Switch to SPDX identifier for license name (#6149). Contributed by Colm O hEigeartaigh. 6f682195ba56 is described below commit 6f682195ba564fdc8e9a4f2607f8ee87abb4d392 Author: Colm O hEigeartaigh AuthorDate: Sat Oct 7 18:20:38 2023 +0100 HADOOP-18923. Switch to SPDX identifier for license name (#6149). Contributed by Colm O hEigeartaigh. Signed-off-by: Ayush Saxena --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index e87525491f54..a54d09f6ee47 100644 --- a/pom.xml +++ b/pom.xml @@ -68,7 +68,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x - Apache License, Version 2.0 + Apache-2.0 https://www.apache.org/licenses/LICENSE-2.0.txt - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch branch-3.2 updated: HADOOP-18923. Switch to SPDX identifier for license name (#6149). Contributed by Colm O hEigeartaigh.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.2 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.2 by this push: new 885af9d5423a HADOOP-18923. Switch to SPDX identifier for license name (#6149). Contributed by Colm O hEigeartaigh. 885af9d5423a is described below commit 885af9d5423ac9c67ecc57a99cbaea98ee8a3940 Author: Colm O hEigeartaigh AuthorDate: Sat Oct 7 18:20:38 2023 +0100 HADOOP-18923. Switch to SPDX identifier for license name (#6149). Contributed by Colm O hEigeartaigh. Signed-off-by: Ayush Saxena --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 4ce2919a9deb..0d9696e19815 100644 --- a/pom.xml +++ b/pom.xml @@ -68,7 +68,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x - Apache License, Version 2.0 + Apache-2.0 https://www.apache.org/licenses/LICENSE-2.0.txt - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (daa78adc8887 -> ee1ebbe5f99e)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from daa78adc8887 HDFS-17200. Add some datanode related metrics to Metrics.md. (#6099). Contributed by huangzhaobo99 add ee1ebbe5f99e HADOOP-18923. Switch to SPDX identifier for license name (#6149). Contributed by Colm O hEigeartaigh. No new revisions were added by this update. Summary of changes: pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17200. Add some datanode related metrics to Metrics.md. (#6099). Contributed by huangzhaobo99
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new daa78adc8887 HDFS-17200. Add some datanode related metrics to Metrics.md. (#6099). Contributed by huangzhaobo99 daa78adc8887 is described below commit daa78adc888704e5688b84b404573ed1e28012db Author: huangzhaobo AuthorDate: Fri Oct 6 15:10:44 2023 +0800 HDFS-17200. Add some datanode related metrics to Metrics.md. (#6099). Contributed by huangzhaobo99 Signed-off-by: Ayush Saxena --- hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md | 8 ++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md index 01d89b81356e..45c323aa9d7e 100644 --- a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md +++ b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md @@ -508,8 +508,12 @@ Each metrics record contains tags such as SessionId and Hostname as additional i | `PacketsSlowWriteToMirror` | Total number of packets whose write to other Datanodes in the pipeline takes more than a certain time (300ms by default) | | `PacketsSlowWriteToDisk` | Total number of packets whose write to disk takes more than a certain time (300ms by default) | | `PacketsSlowWriteToOsCache` | Total number of packets whose write to os cache takes more than a certain time (300ms by default) | -| `slowFlushOrSyncCount` | Total number of packets whose sync/flush takes more than a certain time (300ms by default) | -| `slowAckToUpstreamCount` | Total number of packets whose upstream ack takes more than a certain time (300ms by default) | +| `SlowFlushOrSyncCount` | Total number of packets whose sync/flush takes more than a certain time (300ms by default) | +| `SlowAckToUpstreamCount` | Total 
number of packets whose upstream ack takes more than a certain time (300ms by default) | +| `SumOfActorCommandQueueLength` | Sum of all BPServiceActors command queue length | +| `NumProcessedCommands` | Num of processed commands of all BPServiceActors | +| `ProcessedCommandsOpNumOps` | Total number of processed commands operations | +| `ProcessedCommandsOpAvgTime` | Average time of processed commands operations in milliseconds | FsVolume - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17205. HdfsServerConstants.MIN_BLOCKS_FOR_WRITE should be configurable (#6112). Contributed by Haiyang Hu
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 4c408a557f14 HDFS-17205. HdfsServerConstants.MIN_BLOCKS_FOR_WRITE should be configurable (#6112). Contributed by Haiyang Hu 4c408a557f14 is described below commit 4c408a557f149dfb91ae4ea187d18638cd12dd77 Author: huhaiyang AuthorDate: Fri Oct 6 15:09:23 2023 +0800 HDFS-17205. HdfsServerConstants.MIN_BLOCKS_FOR_WRITE should be configurable (#6112). Contributed by Haiyang Hu Reviewed-by: He Xiaoqiao Signed-off-by: Ayush Saxena --- .../java/org/apache/hadoop/hdfs/DFSConfigKeys.java | 6 +++ .../hdfs/server/blockmanagement/BlockManager.java | 12 ++ .../blockmanagement/BlockPlacementPolicy.java | 10 + .../BlockPlacementPolicyDefault.java | 20 - .../server/blockmanagement/DatanodeDescriptor.java | 7 ++-- .../hdfs/server/common/HdfsServerConstants.java| 2 + .../hadoop/hdfs/server/namenode/NameNode.java | 19 - .../src/main/resources/hdfs-default.xml| 9 + .../blockmanagement/TestReplicationPolicy.java | 23 +++ .../server/namenode/TestNameNodeReconfigure.java | 47 ++ .../org/apache/hadoop/hdfs/tools/TestDFSAdmin.java | 12 +++--- 11 files changed, 155 insertions(+), 12 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java index f0450b0778da..8a3b9b9bda47 100755 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants.StoragePolicySatisfierMode; import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicyDefault; import 
org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicyRackFaultTolerant; +import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.RamDiskReplicaLruTracker; import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.ReservedSpaceCalculator; import org.apache.hadoop.hdfs.web.URLConnectionFactory; @@ -1270,6 +1271,11 @@ public class DFSConfigKeys extends CommonConfigurationKeys { DFS_NAMENODE_BLOCKPLACEMENTPOLICY_EXCLUDE_SLOW_NODES_ENABLED_DEFAULT = false; + public static final String DFS_NAMENODE_BLOCKPLACEMENTPOLICY_MIN_BLOCKS_FOR_WRITE_KEY = + "dfs.namenode.block-placement.min-blocks-for.write"; + public static final int DFS_NAMENODE_BLOCKPLACEMENTPOLICY_MIN_BLOCKS_FOR_WRITE_DEFAULT = + HdfsServerConstants.MIN_BLOCKS_FOR_WRITE; + public static final String DFS_NAMENODE_GC_TIME_MONITOR_ENABLE = "dfs.namenode.gc.time.monitor.enable"; public static final boolean DFS_NAMENODE_GC_TIME_MONITOR_ENABLE_DEFAULT = diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index 54b89c813010..783000bbef2f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -5689,4 +5689,16 @@ public class BlockManager implements BlockStatsMXBean { public boolean getExcludeSlowNodesEnabled(BlockType blockType) { return placementPolicies.getPolicy(blockType).getExcludeSlowNodesEnabled(); } + + public void setMinBlocksForWrite(int minBlocksForWrite) { +ensurePositiveInt(minBlocksForWrite, +DFS_NAMENODE_BLOCKPLACEMENTPOLICY_MIN_BLOCKS_FOR_WRITE_KEY); + placementPolicies.getPolicy(CONTIGUOUS).setMinBlocksForWrite(minBlocksForWrite); + 
placementPolicies.getPolicy(STRIPED).setMinBlocksForWrite(minBlocksForWrite); + } + + @VisibleForTesting + public int getMinBlocksForWrite(BlockType blockType) { +return placementPolicies.getPolicy(blockType).getMinBlocksForWrite(); + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java index 85468a57bde4..a37202630ac6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/se
[hadoop] branch trunk updated (2bf5a9ed118e -> 57100bba1bfd)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 2bf5a9ed118e HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133). Contributed by PJ Fanning add 57100bba1bfd HADOOP-18917. Addendum: Upgrade to commons-io 2.14.0 (#6152). Contributed by PJ Fanning No new revisions were added by this update. Summary of changes: .../fs/azurebfs/services/TestTextFileBasedIdentityHandler.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133). Contributed by PJ Fanning
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 2bf5a9ed118e HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133). Contributed by PJ Fanning 2bf5a9ed118e is described below commit 2bf5a9ed118e5830669db473447a7fd4a4037d97 Author: PJ Fanning AuthorDate: Thu Oct 5 21:28:21 2023 +0100 HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133). Contributed by PJ Fanning Signed-off-by: Ayush Saxena --- LICENSE-binary | 2 +- hadoop-project/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 1eecdf7dd11f..4916fda10923 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -247,7 +247,7 @@ commons-cli:commons-cli:1.5.0 commons-codec:commons-codec:1.11 commons-collections:commons-collections:3.2.2 commons-daemon:commons-daemon:1.0.13 -commons-io:commons-io:2.8.0 +commons-io:commons-io:2.14.0 commons-net:commons-net:3.9.0 de.ruedigermoeller:fst:2.50 io.grpc:grpc-api:1.26.0 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 4322a51cd2d4..3fe8f8fa8f4d 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -118,7 +118,7 @@ 3.2.2 1.24.0 1.9.0 -2.11.0 +2.14.0 3.12.0 1.1.3 3.6.1 - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (b8815fe68bdd -> b87180568b26)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from b8815fe68bdd MAPREDUCE-7453. Revert HADOOP-18649. (#6102). Contributed by zhengchenyu. add b87180568b26 HDFS-17209. Correct comments to align with the code (#6110). Contributed by Yu Wang. No new revisions were added by this update. Summary of changes: .../src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: MAPREDUCE-7453. Revert HADOOP-18649. (#6102). Contributed by zhengchenyu.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new b8815fe68bdd MAPREDUCE-7453. Revert HADOOP-18649. (#6102). Contributed by zhengchenyu. b8815fe68bdd is described below commit b8815fe68bddd670174b6f1fa19aff178dec7b59 Author: zhengchenyu AuthorDate: Sun Oct 1 19:55:32 2023 +0800 MAPREDUCE-7453. Revert HADOOP-18649. (#6102). Contributed by zhengchenyu. In container-log4j.properties, log4j.appender.{APPENDER}.MaxFileSize is set to ${yarn.app.container.log.filesize}, but yarn.app.container.log.filesize is 0 in default. So log is missing. This log is always rolling and only show the latest log. --- .../apache/hadoop/mapreduce/v2/util/MRApps.java| 5 +- .../src/main/resources/mapred-default.xml | 15 ++- .../apache/hadoop/yarn/ContainerLogAppender.java | 128 + .../hadoop/yarn/ContainerRollingLogAppender.java | 75 .../hadoop/yarn/TestContainerLogAppender.java | 48 .../src/main/resources/container-log4j.properties | 39 --- 6 files changed, 286 insertions(+), 24 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index 72dd48b09c2c..a3ccfd72d8ce 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -60,6 +60,8 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import 
org.apache.hadoop.util.ApplicationClassLoader; import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.yarn.ContainerLogAppender; +import org.apache.hadoop.yarn.ContainerRollingLogAppender; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.LocalResource; @@ -586,7 +588,8 @@ public class MRApps extends Apps { /** * Add the JVM system properties necessary to configure - * {@link org.apache.log4j.RollingFileAppender}. + * {@link ContainerLogAppender} or + * {@link ContainerRollingLogAppender}. * * @param task for map/reduce, or null for app master * @param vargs the argument list to append to diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml index a6d68acda34a..9b0d8b563d7b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml @@ -840,8 +840,11 @@ yarn.app.mapreduce.task.container.log.backups 0 Number of backup files for task logs when using -RollingFileAppender (RFA). See -org.apache.log4j.RollingFileAppender.maxBackupIndex. +ContainerRollingLogAppender (CRLA). See +org.apache.log4j.RollingFileAppender.maxBackupIndex. By default, +ContainerLogAppender (CLA) is used, and container logs are not rolled. CRLA +is enabled for tasks when both mapreduce.task.userlog.limit.kb and +yarn.app.mapreduce.task.container.log.backups are greater than zero. @@ -849,8 +852,12 @@ yarn.app.mapreduce.am.container.log.backups 0 Number of backup files for the ApplicationMaster logs when using -RollingFileAppender (RFA). See -org.apache.log4j.RollingFileAppender.maxBackupIndex. 
+ContainerRollingLogAppender (CRLA). See +org.apache.log4j.RollingFileAppender.maxBackupIndex. By default, +ContainerLogAppender (CLA) is used, and container logs are not rolled. CRLA +is enabled for the ApplicationMaster when both +yarn.app.mapreduce.am.container.log.limit.kb and +yarn.app.mapreduce.am.container.log.backups are greater than zero. diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ContainerLogAppender.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ContainerLogAppender.java new file mode 100644 index ..03a0078167f8 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ContainerLogAppender.java
[hadoop] branch trunk updated (390cd294f8c6 -> 8931393302c6)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 390cd294f8c6 HDFS-17211. Fix comments in the RemoteParam class. (#6124). Contributed hellosrc. add 8931393302c6 HDFS-17133: TestFsDatasetImpl missing null check when cleaning up (#6079). Contributed by ConfX. No new revisions were added by this update. Summary of changes: .../hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17211. Fix comments in the RemoteParam class. (#6124). Contributed by hellosrc.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 390cd294f8c6 HDFS-17211. Fix comments in the RemoteParam class. (#6124). Contributed hellosrc. 390cd294f8c6 is described below commit 390cd294f8c698058656cddc68a30882a090dd0e Author: xiaojunxiang <65019264+hello...@users.noreply.github.com> AuthorDate: Fri Sep 29 14:25:59 2023 +0800 HDFS-17211. Fix comments in the RemoteParam class. (#6124). Contributed hellosrc. Reviewed-by: Xing Lin Signed-off-by: Ayush Saxena --- .../org/apache/hadoop/hdfs/server/federation/router/RemoteParam.java| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RemoteParam.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RemoteParam.java index 8b216d919ed0..b6b4ef731c27 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RemoteParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RemoteParam.java @@ -35,7 +35,7 @@ public class RemoteParam { /** * Constructs a default remote parameter. Always maps the value to the - * destination of the provided RemoveLocationContext. + * destination of the provided RemoteLocationContext. */ public RemoteParam() { this.paramMap = null; - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (2d871fab7888 -> 35c42e4039a6)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 2d871fab7888 MAPREDUCE-7456. Extend add-opens flag to container launch commands on JDK17 nodes. Contributed by Peter Szucs add 35c42e4039a6 HADOOP-18912. upgrade snappy-java to 1.1.10.4 (#6115). Contributed by PJ Fanning. No new revisions were added by this update. Summary of changes: LICENSE-binary | 2 +- hadoop-project/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (ad001c93cf9 -> 068d8c7e4db)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from ad001c93cf9 HDFS-17111. RBF: Optimize msync to only call nameservices that have observer reads enabled. (#5860). Contributed by Simbarashe Dzinamarira. add 068d8c7e4db HDFS-17115. HttpFS Add Support getErasureCodeCodecs API (#5875). Contributed by Hualong Zhang. No new revisions were added by this update. Summary of changes: .../hadoop/fs/http/client/HttpFSFileSystem.java| 12 + .../apache/hadoop/fs/http/server/FSOperations.java | 27 +++ .../fs/http/server/HttpFSParametersProvider.java | 1 + .../apache/hadoop/fs/http/server/HttpFSServer.java | 8 .../http/server/metrics/HttpFSServerMetrics.java | 5 ++ .../hadoop/fs/http/client/BaseTestHttpFSWith.java | 56 +- 6 files changed, 107 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (c04a17f1160 -> ad001c93cf9)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from c04a17f1160 HADOOP-18823. Add Labeler Github Action. (#5874). Contributed by Ayush Saxena. add ad001c93cf9 HDFS-17111. RBF: Optimize msync to only call nameservices that have observer reads enabled. (#5860). Contributed by Simbarashe Dzinamarira. No new revisions were added by this update. Summary of changes: .../federation/router/RouterClientProtocol.java| 12 +-- .../server/federation/router/RouterRpcClient.java | 16 +++-- .../federation/router/TestObserverWithRouter.java | 41 ++ 3 files changed, 64 insertions(+), 5 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HADOOP-18823. Add Labeler Github Action. (#5874). Contributed by Ayush Saxena.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new c04a17f1160 HADOOP-18823. Add Labeler Github Action. (#5874). Contributed by Ayush Saxena. c04a17f1160 is described below commit c04a17f1160e3dedcdf294d09f878136af75172a Author: Ayush Saxena AuthorDate: Tue Jul 25 03:04:49 2023 +0530 HADOOP-18823. Add Labeler Github Action. (#5874). Contributed by Ayush Saxena. Reviewed-by: He Xiaoqiao --- .github/labeler.yml | 57 +++ .github/workflows/labeler.yml | 40 ++ 2 files changed, 97 insertions(+) diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100755 index 000..a3fa437e0de --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,57 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# Pull Request Labeler Github Action Configuration: https://github.com/marketplace/actions/labeler + +trunk: + - '**' +INFRA: + - .asf.yaml + - .gitattributes + - .gitignore + - .github/** + - dev-support/** + - start-build-env.sh +BUILD: + - '**/pom.xml' +COMMON: + - hadoop-common-project/** +HDFS: + - hadoop-hdfs-project/** +RBF: + - hadoop-hdfs-project/hadoop-hdfs-rbf/** +NATIVE: + - hadoop-hdfs-project/hadoop-hdfs-native-client/** + - hadoop-common-project/hadoop-common/src/main/native/** +YARN: + - hadoop-yarn-project/** +MAPREDUCE: + - hadoop-mapreduce-project/** +DISTCP: + - hadoop-tools/hadoop-distcp/** +TOOLS: + - hadoop-tools/** +AWS: + - hadoop-tools/hadoop-aws/** +ABFS: + - hadoop-tools/hadoop-azure/** +DYNAMOMETER: + - hadoop-tools/hadoop-dynamometer/** +MAVEN-PLUGINS: + - hadoop-maven-plugins/** diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 000..f85aff05dda --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +name: "Pull Request Labeler" +on: pull_request_target + +permissions: + contents: read + pull-requests: write + +jobs: + triage: +runs-on: ubuntu-latest +steps: + - uses: actions/checkout@v3 +with: + sparse-checkout: | +.github + - uses: actions/labeler@v4.3.0 +with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + sync-labels: true + configuration-path: .github/labeler.yml + dot: true \ No newline at end of file - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (49c98da8385 -> 9cfe9ccd262)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 49c98da8385 HDFS-17112. Show decommission duration in JMX and HTML. (#5866). Contributed by Shuyan Zhang. add 9cfe9ccd262 HDFS-17119. RBF: Logger fix for StateStoreMySQLImpl. (#5882). Contributed by Zhaohui Wang. No new revisions were added by this update. Summary of changes: .../hdfs/server/federation/store/driver/impl/StateStoreMySQLImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17074. Remove incorrect comment in TestRedudantBlocks#setup. (#5822). Contributed by farmmamba.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 130bd033d53 HDFS-17074. Remove incorrect comment in TestRedudantBlocks#setup. (#5822). Contributed by farmmamba. 130bd033d53 is described below commit 130bd033d53d7a1621045b2ee0e24111a7599c15 Author: hfutatzhanghb AuthorDate: Fri Jul 21 03:15:40 2023 +0800 HDFS-17074. Remove incorrect comment in TestRedudantBlocks#setup. (#5822). Contributed by farmmamba. Reviewed-by: zhangshuyan Signed-off-by: Ayush Saxena --- .../java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java | 1 - 1 file changed, 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java index 1a1fc16215a..f5d54d29fdc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java @@ -66,7 +66,6 @@ public class TestRedudantBlocks { public void setup() throws IOException { Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize); -// disable block recovery conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1); conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1); SimulatedFSDataset.setFactory(conf); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (80fefd093f7 -> fbe9a292469)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 80fefd093f7 HDFS-17067 Use BlockingThreadPoolExecutorService for nnProbingThreadPool in ObserverReadProxy (#5803) add fbe9a292469 YARN-11540. Fix typo: form -> from (#5861). Contributed by Seokchan Yoon. No new revisions were added by this update. Summary of changes: .../apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17075. Reconfig disk balancer parameters for datanode (#5823). Contributed by Haiyang Hu.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new c44823dadb7 HDFS-17075. Reconfig disk balancer parameters for datanode (#5823). Contributed by Haiyang Hu. c44823dadb7 is described below commit c44823dadb73a3033f515329f70b2e3126fcb7be Author: huhaiyang AuthorDate: Sun Jul 16 13:57:31 2023 +0800 HDFS-17075. Reconfig disk balancer parameters for datanode (#5823). Contributed by Haiyang Hu. Signed-off-by: Ayush Saxena --- .../hadoop/hdfs/server/datanode/DataNode.java | 52 +++- .../hadoop/hdfs/server/datanode/DiskBalancer.java | 56 - .../datanode/TestDataNodeReconfiguration.java | 57 ++ .../server/diskbalancer/TestDiskBalancerRPC.java | 2 +- .../org/apache/hadoop/hdfs/tools/TestDFSAdmin.java | 2 +- 5 files changed, 163 insertions(+), 6 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java index 2096f18d31a..0ed1304cb8f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java @@ -77,6 +77,10 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MAX_SLOWDISKS_TO import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_STARTUP_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_DEFAULT; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DISK_BALANCER_ENABLED; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DISK_BALANCER_ENABLED_DEFAULT; +import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DISK_BALANCER_PLAN_VALID_INTERVAL; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DISK_BALANCER_PLAN_VALID_INTERVAL_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_MAX_NUM_BLOCKS_TO_LOG_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_MAX_NUM_BLOCKS_TO_LOG_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_METRICS_LOGGER_PERIOD_SECONDS_DEFAULT; @@ -356,7 +360,9 @@ public class DataNode extends ReconfigurableBase DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_KEY, FS_DU_INTERVAL_KEY, FS_GETSPACEUSED_JITTER_KEY, - FS_GETSPACEUSED_CLASSNAME)); + FS_GETSPACEUSED_CLASSNAME, + DFS_DISK_BALANCER_ENABLED, + DFS_DISK_BALANCER_PLAN_VALID_INTERVAL)); public static final String METRICS_LOG_NAME = "DataNodeMetricsLog"; @@ -706,6 +712,9 @@ public class DataNode extends ReconfigurableBase case FS_GETSPACEUSED_JITTER_KEY: case FS_GETSPACEUSED_CLASSNAME: return reconfDfsUsageParameters(property, newVal); +case DFS_DISK_BALANCER_ENABLED: +case DFS_DISK_BALANCER_PLAN_VALID_INTERVAL: + return reconfDiskBalancerParameters(property, newVal); default: break; } @@ -951,6 +960,44 @@ public class DataNode extends ReconfigurableBase } } + private String reconfDiskBalancerParameters(String property, String newVal) + throws ReconfigurationException { +String result = null; +try { + LOG.info("Reconfiguring {} to {}", property, newVal); + if (property.equals(DFS_DISK_BALANCER_ENABLED)) { +if (newVal != null && !newVal.equalsIgnoreCase("true") +&& !newVal.equalsIgnoreCase("false")) { + throw new IllegalArgumentException("Not a valid Boolean value for " + property); +} +boolean enable = (newVal == null ? 
DFS_DISK_BALANCER_ENABLED_DEFAULT : +Boolean.parseBoolean(newVal)); +getDiskBalancer().setDiskBalancerEnabled(enable); +result = Boolean.toString(enable); + } else if (property.equals(DFS_DISK_BALANCER_PLAN_VALID_INTERVAL)) { +if (newVal == null) { + // set to default + long defaultInterval = getConf().getTimeDuration( + DFS_DISK_BALANCER_PLAN_VALID_INTERVAL, + DFS_DISK_BALANCER_PLAN_VALID_INTERVAL_DEFAULT, + TimeUnit.MILLISECONDS); + getDiskBalancer().setPlanValidityInterval(defaultInterval); + result = DFS_DISK_BALANCER_PLAN_VALID_INTERVAL_DEFAULT; +} else { + long newInterval = getConf() + .getTimeDurationHelper(DFS_DISK_BALANCER_PLAN_VALID_INTERVAL, + newVal, TimeUnit.MILLISECONDS); + getDiskBalancer(
[hadoop] branch trunk updated: HDFS-17086. Fix the parameter settings in TestDiskspaceQuotaUpdate#updateCountForQuota (#5842). Contributed by Haiyang Hu.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 8dd9c874e14 HDFS-17086. Fix the parameter settings in TestDiskspaceQuotaUpdate#updateCountForQuota (#5842). Contributed by Haiyang Hu. 8dd9c874e14 is described below commit 8dd9c874e1478b653f9610a923b5bcaf9a422b4a Author: huhaiyang AuthorDate: Sat Jul 15 14:30:07 2023 +0800 HDFS-17086. Fix the parameter settings in TestDiskspaceQuotaUpdate#updateCountForQuota (#5842). Contributed by Haiyang Hu. Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- .../apache/hadoop/hdfs/server/namenode/TestDiskspaceQuotaUpdate.java| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDiskspaceQuotaUpdate.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDiskspaceQuotaUpdate.java index 771caefd20a..0cf696f504b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDiskspaceQuotaUpdate.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDiskspaceQuotaUpdate.java @@ -394,7 +394,7 @@ public class TestDiskspaceQuotaUpdate { FSNamesystem fsn = cluster.getNamesystem(); fsn.writeLock(); try { - getFSDirectory().updateCountForQuota(1); + getFSDirectory().updateCountForQuota(i); } finally { fsn.writeUnlock(); } - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (e7d74f3d594 -> c457c445b77)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from e7d74f3d594 HADOOP-18291. S3A prefetch - Implement thread-safe LRU cache for SingleFilePerBlockCache (#5754) add c457c445b77 HDFS-17068. Datanode should record last directory scan time. (#5809). Contributed by farmmamba. No new revisions were added by this update. Summary of changes: .../hadoop/hdfs/server/datanode/DirectoryScanner.java | 1 + .../hdfs/server/datanode/fsdataset/FsDatasetSpi.java | 6 ++ .../server/datanode/fsdataset/impl/FsDatasetImpl.java | 13 - .../server/datanode/metrics/DataNodeMetricHelper.java | 7 --- .../hdfs/server/datanode/metrics/FSDatasetMBean.java | 5 + .../hdfs/server/datanode/SimulatedFSDataset.java | 10 ++ .../hdfs/server/datanode/TestDirectoryScanner.java| 19 +++ .../datanode/extdataset/ExternalDatasetImpl.java | 6 ++ .../datanode/fsdataset/impl/TestFsDatasetImpl.java| 8 +++- 9 files changed, 70 insertions(+), 5 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17081. EC: Add logic for striped blocks in isSufficientlyReplicated (#5833). Contributed by Haiyang Hu.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new c1d19c39517 HDFS-17081. EC: Add logic for striped blocks in isSufficientlyReplicated (#5833). Contributed by Haiyang Hu. c1d19c39517 is described below commit c1d19c39517df1944c01b90ae738239b465f6664 Author: huhaiyang AuthorDate: Fri Jul 14 14:26:54 2023 +0800 HDFS-17081. EC: Add logic for striped blocks in isSufficientlyReplicated (#5833). Contributed by Haiyang Hu. Signed-off-by: Ayush Saxena --- .../org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index fab3619cb2c..e09fe7fbd9b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -1696,7 +1696,7 @@ public class BlockManager implements BlockStatsMXBean { public boolean isSufficientlyReplicated(BlockInfo b) { // Compare against the lesser of the minReplication and number of live DNs. final int liveReplicas = countNodes(b).liveReplicas(); -if (liveReplicas >= minReplication) { +if (hasMinStorage(b, liveReplicas)) { return true; } // getNumLiveDataNodes() is very expensive and we minimize its use by - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (4c8d048f256 -> dfb351c3a8c)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 4c8d048f256 HDFS-17082. Add documentation for provisionSnapshotTrash command to HDFSCommands.md and HdfsSnapshots.md" (#5834). Contributed by Haiyang Hu. add dfb351c3a8c HDFS-17083. Support getErasureCodeCodecs API in WebHDFS (#5836). Contributed by Hualong Zhang. No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/hdfs/web/JsonUtilClient.java | 10 +++ .../apache/hadoop/hdfs/web/WebHdfsFileSystem.java | 13 + .../hadoop/hdfs/web/resources/GetOpParam.java | 1 + .../web/resources/NamenodeWebHdfsMethods.java | 6 .../hadoop-hdfs/src/site/markdown/WebHDFS.md | 33 ++ .../org/apache/hadoop/hdfs/web/TestWebHDFS.java| 24 6 files changed, 87 insertions(+) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (325f7e5fdb4 -> 4c8d048f256)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 325f7e5fdb4 YARN-11529 Add metrics for ContainerMonitorImpl. (#5828) add 4c8d048f256 HDFS-17082. Add documentation for provisionSnapshotTrash command to HDFSCommands.md and HdfsSnapshots.md" (#5834). Contributed by Haiyang Hu. No new revisions were added by this update. Summary of changes: .../hadoop-hdfs/src/site/markdown/HDFSCommands.md| 2 ++ .../hadoop-hdfs/src/site/markdown/HdfsSnapshots.md | 16 2 files changed, 18 insertions(+) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HADOOP-18794. ipc.server.handler.queue.size missing from core-default.xml (#5819). Contributed by WangYuanben.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 6843f8e4e0f HADOOP-18794. ipc.server.handler.queue.size missing from core-default.xml (#5819). Contributed by WangYuanben. 6843f8e4e0f is described below commit 6843f8e4e0f174e3df4cfd06d8bb65cfce31eaa8 Author: WangYuanben <48795318+yuanbenw...@users.noreply.github.com> AuthorDate: Tue Jul 11 19:09:50 2023 +0800 HADOOP-18794. ipc.server.handler.queue.size missing from core-default.xml (#5819). Contributed by WangYuanben. Reviewed-by: Hualong Zhang Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- .../hadoop-common/src/main/resources/core-default.xml| 9 + 1 file changed, 9 insertions(+) diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml index 438e2df1372..dd543deb8a5 100644 --- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml +++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml @@ -2321,6 +2321,15 @@ The switch to turn S3A auditing on or off. + + ipc.server.handler.queue.size + 100 + +Indicates how many calls per handler are allowed in the queue. This value can +determine the maximum call queue size by multiplying the number of handler threads. + + + ipc.server.listen.queue.size 256 - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (a822a3c70ba -> 4e699f03835)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from a822a3c70ba YARN-11528. Lock triple-beam to the version compatible with node.js 12 to avoid compilation error. (#5827). Contributed by Masatake Iwasaki add 4e699f03835 HDFS-17076. Remove the unused method isSlownodeByNameserviceId in DataNode (#5824). Contributed by Haiyang Hu. No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/hdfs/server/datanode/BlockPoolManager.java | 7 --- .../main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java | 4 2 files changed, 11 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch branch-3.3 updated: YARN-11528. Lock triple-beam to the version compatible with node.js 12 to avoid compilation error. (#5827). Contributed by Masatake Iwasaki
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/branch-3.3 by this push: new b85b7b55a5a YARN-11528. Lock triple-beam to the version compatible with node.js 12 to avoid compilation error. (#5827). Contributed by Masatake Iwasaki b85b7b55a5a is described below commit b85b7b55a5a3443b437efe04de186aab00d0deb9 Author: Masatake Iwasaki AuthorDate: Tue Jul 11 15:38:09 2023 +0900 YARN-11528. Lock triple-beam to the version compatible with node.js 12 to avoid compilation error. (#5827). Contributed by Masatake Iwasaki Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- .../hadoop-yarn-applications-catalog-webapp/package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json index f09442cfc4e..59cc3da179f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json @@ -19,6 +19,9 @@ "shelljs": "^0.2.6", "apidoc": "0.17.7" }, +"resolutions": { +"triple-beam": "1.3.0" +}, "scripts": { "prestart": "npm install & mvn clean package", "pretest": "npm install" - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: YARN-11528. Lock triple-beam to the version compatible with node.js 12 to avoid compilation error. (#5827). Contributed by Masatake Iwasaki
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new a822a3c70ba YARN-11528. Lock triple-beam to the version compatible with node.js 12 to avoid compilation error. (#5827). Contributed by Masatake Iwasaki a822a3c70ba is described below commit a822a3c70bac8bb25b7b5b926030a2cd9499f52e Author: Masatake Iwasaki AuthorDate: Tue Jul 11 15:38:09 2023 +0900 YARN-11528. Lock triple-beam to the version compatible with node.js 12 to avoid compilation error. (#5827). Contributed by Masatake Iwasaki Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- .../hadoop-yarn-applications-catalog-webapp/package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json index f09442cfc4e..59cc3da179f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/package.json @@ -19,6 +19,9 @@ "shelljs": "^0.2.6", "apidoc": "0.17.7" }, +"resolutions": { +"triple-beam": "1.3.0" +}, "scripts": { "prestart": "npm install & mvn clean package", "pretest": "npm install" - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (b673ebfec25 -> a84284e9742)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from b673ebfec25 HDFS-17070. Remove unused import in DataNodeMetricHelper.java. (#5812). Contributed by farmmamba. add a84284e9742 HDFS-17064. Document the usage of the new Balancer "sortTopNodes" and "hotBlockTimeInterval" parameter (#5794). Contributed by Haiyang Hu. No new revisions were added by this update. Summary of changes: .../main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java| 4 ++-- hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSCommands.md | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17070. Remove unused import in DataNodeMetricHelper.java. (#5812). Contributed by farmmamba.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new b673ebfec25 HDFS-17070. Remove unused import in DataNodeMetricHelper.java. (#5812). Contributed by farmmamba. b673ebfec25 is described below commit b673ebfec25082c83d9144b6267dee202d27c45f Author: hfutatzhanghb AuthorDate: Mon Jul 10 15:28:23 2023 +0800 HDFS-17070. Remove unused import in DataNodeMetricHelper.java. (#5812). Contributed by farmmamba. Reviewed-by: Xing Lin Signed-off-by: Ayush Saxena --- .../apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java | 1 - 1 file changed, 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java index 8bbe08bc053..b1a2d4f956c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hdfs.server.datanode.metrics; -import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsTag; import org.apache.hadoop.metrics2.lib.Interns; - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (750c0fc6318 -> 5a35fb5a726)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 750c0fc6318 HDFS-17052. Improve BlockPlacementPolicyRackFaultTolerant to avoid choose nodes failed when no enough Rack. (#5759). Contributed by Hualong Zhang and Shuyan Zhang. add 5a35fb5a726 HADOOP-18783. Upgrade to netty 4.1.94 due to CVE (#5774). Contributed by PJ Fanning. No new revisions were added by this update. Summary of changes: LICENSE-binary | 60 +- hadoop-project/pom.xml | 2 +- 2 files changed, 31 insertions(+), 31 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17062. HA NameNode Web UI should show last HA Transition time (#5791). Contributed by Zhaohui Wang.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 8fb611c1f70 HDFS-17062. HA NameNode Web UI should show last HA Transition time (#5791). Contributed by Zhaohui Wang. 8fb611c1f70 is described below commit 8fb611c1f7098b5e01886937591f6642d9bbc37d Author: wangzhaohui <32935220+wzhallri...@users.noreply.github.com> AuthorDate: Fri Jun 30 11:26:34 2023 +0800 HDFS-17062. HA NameNode Web UI should show last HA Transition time (#5791). Contributed by Zhaohui Wang. Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html | 3 +++ 1 file changed, 3 insertions(+) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html index 889bc8aff51..4ba6139ad33 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html @@ -196,6 +196,9 @@ {#fsn} Last Checkpoint Time{@if cond="{LastCheckpointTime} === 0"}Never{:else}{LastCheckpointTime|date_tostring}{/if} {/fsn} +{#nnstat} + Last HA Transition Time{@if cond="{LastHATransitionTime} === 0"}Never{:else}{LastHATransitionTime|date_tostring}{/if} +{/nnstat} {#ecstat} Enabled Erasure Coding Policies{EnabledEcPolicies} {/ecstat} - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HADOOP-18713. Update solr from 8.8.2 to 8.11.2 (#5459). Contributed by Xuesen Liang.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new cebcb44d377 HADOOP-18713. Update solr from 8.8.2 to 8.11.2 (#5459). Contributed by Xuesen Liang. cebcb44d377 is described below commit cebcb44d377710db6d4cff45f0b956f5a3372f51 Author: liangxs AuthorDate: Thu Jun 22 15:45:57 2023 +0800 HADOOP-18713. Update solr from 8.8.2 to 8.11.2 (#5459). Contributed by Xuesen Liang. Reviewed-by: Wei-Chiu Chuang Signed-off-by: Ayush Saxena --- LICENSE-binary | 2 +- hadoop-project/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 5d9b6f8a2d8..896e43a17c2 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -337,7 +337,7 @@ org.apache.kerby:kerby-pkix:2.0.3 org.apache.kerby:kerby-util:2.0.3 org.apache.kerby:kerby-xdr:2.0.3 org.apache.kerby:token-provider:2.0.3 -org.apache.solr:solr-solrj:8.8.2 +org.apache.solr:solr-solrj:8.11.2 org.apache.yetus:audience-annotations:0.5.0 org.apache.zookeeper:zookeeper:3.6.3 org.codehaus.jettison:jettison:1.5.4 diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 31cf1745edb..486a40d48a7 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -205,7 +205,7 @@ 3.12.2 3.9.0 1.5.6 -8.8.2 +8.11.2 1.1.3.Final 1.0.2 5.4.0 - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (7c54a783435 -> e14c52c25f2)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 7c54a783435 YARN-11506. The formatted yarn queue list is displayed on CLI (#5716). Contributed by Lu Yuan. add e14c52c25f2 HDFS-17053. Optimize method BlockInfoStriped#findSlot to reduce time complexity. (#5757). Contributed by farmmamba. No new revisions were added by this update. Summary of changes: .../apache/hadoop/hdfs/server/blockmanagement/BlockInfoStriped.java| 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: YARN-11506. The formatted yarn queue list is displayed on CLI (#5716). Contributed by Lu Yuan.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 7c54a783435 YARN-11506. The formatted yarn queue list is displayed on CLI (#5716). Contributed by Lu Yuan. 7c54a783435 is described below commit 7c54a78343566716a8991a67b619fb0811c66dc7 Author: yl09099 <33595968+yl09...@users.noreply.github.com> AuthorDate: Sun Jun 18 17:09:25 2023 +0800 YARN-11506. The formatted yarn queue list is displayed on CLI (#5716). Contributed by Lu Yuan. Reviewed-by: Shilun Fan Signed-off-by: Ayush Saxena --- .../apache/hadoop/yarn/client/cli/QueueCLI.java| 35 ++- .../yarn/client/util/FormattingCLIUtils.java | 277 + .../apache/hadoop/yarn/client/cli/TestYarnCLI.java | 35 ++- 3 files changed, 307 insertions(+), 40 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java index c8d71514e21..2df7aeb8d0f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java @@ -23,6 +23,7 @@ import java.io.PrintWriter; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; +import java.util.Arrays; import java.util.List; import java.util.Set; @@ -36,6 +37,7 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.QueueInfo; +import org.apache.hadoop.yarn.client.util.FormattingCLIUtils; import 
org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.classification.VisibleForTesting; @@ -222,27 +224,20 @@ public class QueueCLI extends YarnCLI { } private void printQueueInfos(PrintWriter writer, List queueInfos) { -writer.print(queueInfos.size() + " queues were found : \n"); -writer.print("Queue Name\tQueue Path\tState\tCapacity\tCurrent Capacity" + -"\tMaximum Capacity\tWeight\tMaximum Parallel Apps\n"); +String titleString = queueInfos.size() + " queues were found"; +List headerStrings = Arrays.asList("Queue Name", "Queue Path", "State", "Capacity", +"Current Capacity", "Maximum Capacity", "Weight", "Maximum Parallel Apps"); +FormattingCLIUtils formattingCLIUtils = new FormattingCLIUtils(titleString) +.addHeaders(headerStrings); +DecimalFormat df = new DecimalFormat("#.00"); for (QueueInfo queueInfo : queueInfos) { - writer.print(queueInfo.getQueueName()); - writer.print("\t"); - writer.print(queueInfo.getQueuePath()); - writer.print("\t"); - writer.print(queueInfo.getQueueState()); - DecimalFormat df = new DecimalFormat("#.00"); - writer.print("\t"); - writer.print(df.format(queueInfo.getCapacity() * 100) + "%"); - writer.print("\t"); - writer.print(df.format(queueInfo.getCurrentCapacity() * 100) + "%"); - writer.print("\t"); - writer.print(df.format(queueInfo.getMaximumCapacity() * 100) + "%"); - writer.print("\t"); - writer.print(df.format(queueInfo.getWeight())); - writer.print("\t"); - writer.print(queueInfo.getMaxParallelApps()); - writer.print("\n"); + formattingCLIUtils.addLine(queueInfo.getQueueName(), queueInfo.getQueuePath(), + queueInfo.getQueueState(), df.format(queueInfo.getCapacity() * 100) + "%", + df.format(queueInfo.getCurrentCapacity() * 100) + "%", + df.format(queueInfo.getMaximumCapacity() * 100) + "%", + df.format(queueInfo.getWeight()), + queueInfo.getMaxParallelApps()); } +writer.print(formattingCLIUtils.render()); } } diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/FormattingCLIUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/FormattingCLIUtils.java new file mode 100644 index 000..19fe3927abf --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/FormattingCLIUtils.java @@ -0,0 +1,277 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one +
[hadoop] branch trunk updated (427366b73bf -> 9a7d1b49e2f)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 427366b73bf HDFS-17042 Add rpcCallSuccesses and OverallRpcProcessingTime to RpcMetrics for Namenode (#5730) add 9a7d1b49e2f HDFS-17043. HttpFS implementation for getAllErasureCodingPolicies (#5734). Contributed by Hualong Zhang. No new revisions were added by this update. Summary of changes: .../hadoop/fs/http/client/HttpFSFileSystem.java| 13 +++ .../apache/hadoop/fs/http/server/FSOperations.java | 27 ++ .../fs/http/server/HttpFSParametersProvider.java | 1 + .../apache/hadoop/fs/http/server/HttpFSServer.java | 8 + .../http/server/metrics/HttpFSServerMetrics.java | 5 +++ .../hadoop/fs/http/client/BaseTestHttpFSWith.java | 42 -- 6 files changed, 94 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (2fe3b2a73f6 -> 02027c8dccc)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 2fe3b2a73f6 HADOOP-18763. Upgrade aws-java-sdk to 1.12.367 (#5741) add 02027c8dccc HDFS-17051. Fix wrong time unit in TestFileAppend4#recoverFile (#5749). Contributed by Zhaohui Wang. No new revisions were added by this update. Summary of changes: .../src/test/java/org/apache/hadoop/hdfs/TestFileAppend4.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-17047. BlockManager#addStoredBlock should log storage id when AddBlockResult is REPLACED (#5742). Contributed by farmmamba.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 1a918a6b960 HDFS-17047. BlockManager#addStoredBlock should log storage id when AddBlockResult is REPLACED (#5742). Contributed by farmmamba. 1a918a6b960 is described below commit 1a918a6b960570935338123e5413e64017be6d54 Author: hfutatzhanghb AuthorDate: Thu Jun 15 02:54:22 2023 +0800 HDFS-17047. BlockManager#addStoredBlock should log storage id when AddBlockResult is REPLACED (#5742). Contributed by farmmamba. Signed-off-by: Ayush Saxena --- .../org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java| 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index 9133515afbc..16b79539fdd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -3645,7 +3645,8 @@ public class BlockManager implements BlockStatsMXBean { } else if (result == AddBlockResult.REPLACED) { curReplicaDelta = 0; blockLog.warn("BLOCK* addStoredBlock: block {} moved to storageType " + - "{} on node {}", reportedBlock, storageInfo.getStorageType(), node); + "{} on node {} storageId {}, reportedBlock is {}", reportedBlock, + storageInfo.getStorageType(), node, storageInfo.getStorageID(), reportedBlock); } else { // if the same block is added again and the replica was corrupt // previously because of a wrong gen stamp, remove it from the - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: 
common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-16946. Fix getTopTokenRealOwners to return String (#5696). Contributed by Nishtha Shah.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 3fbadc5d505 HDFS-16946. Fix getTopTokenRealOwners to return String (#5696). Contributed by Nishtha Shah. 3fbadc5d505 is described below commit 3fbadc5d505d95c5bf968dc634d1b4d3416baddd Author: NishthaShah AuthorDate: Tue Jun 13 10:01:35 2023 +0530 HDFS-16946. Fix getTopTokenRealOwners to return String (#5696). Contributed by Nishtha Shah. Reviewed-by: Inigo Goiri Signed-off-by: Ayush Saxena --- .../hdfs/server/federation/metrics/RBFMetrics.java | 17 +-- .../security/TestRouterSecurityManager.java| 53 -- 2 files changed, 62 insertions(+), 8 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/metrics/RBFMetrics.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/metrics/RBFMetrics.java index e1068394f6f..41b58c4a16b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/metrics/RBFMetrics.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/metrics/RBFMetrics.java @@ -81,12 +81,14 @@ import org.apache.hadoop.hdfs.server.federation.store.records.MembershipStats; import org.apache.hadoop.hdfs.server.federation.store.records.MountTable; import org.apache.hadoop.hdfs.server.federation.store.records.RouterState; import org.apache.hadoop.hdfs.server.federation.store.records.StateStoreVersion; +import org.apache.hadoop.hdfs.web.JsonUtil; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsRegistry; import 
org.apache.hadoop.metrics2.util.MBeans; +import org.apache.hadoop.metrics2.util.Metrics2Util; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.VersionInfo; @@ -712,13 +714,18 @@ public class RBFMetrics implements RouterMBean, FederationMBean { @Override public String getTopTokenRealOwners() { -RouterSecurityManager mgr = -this.router.getRpcServer().getRouterSecurityManager(); +String topTokenRealOwnersString = ""; +RouterSecurityManager mgr = this.router.getRpcServer().getRouterSecurityManager(); if (mgr != null && mgr.getSecretManager() != null) { - return JSON.toString(mgr.getSecretManager() - .getTopTokenRealOwners(this.topTokenRealOwners)); + try { +List topOwners = mgr.getSecretManager() +.getTopTokenRealOwners(this.topTokenRealOwners); +topTokenRealOwnersString = JsonUtil.toJsonString(topOwners); + } catch (Exception e) { +LOG.error("Unable to fetch the top token real owners as string {}", e.getMessage()); + } } -return ""; +return topTokenRealOwnersString; } @Override diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/security/TestRouterSecurityManager.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/security/TestRouterSecurityManager.java index d62837ccb13..9b2b5a06588 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/security/TestRouterSecurityManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/security/TestRouterSecurityManager.java @@ -18,16 +18,21 @@ package org.apache.hadoop.hdfs.server.federation.security; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.router.RouterHDFSContract; import 
org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; +import org.apache.hadoop.hdfs.server.federation.FederationTestUtils; import org.apache.hadoop.hdfs.server.federation.RouterConfigBuilder; +import org.apache.hadoop.hdfs.server.federation.metrics.RouterMBean; import org.apache.hadoop.hdfs.server.federation.router.security.RouterSecurityManager; import org.apache.hadoop.hdfs.server.federation.router.Router; import org.apache.hadoop.hdfs.server.federation.router.security.token.ZKDelegationTokenSecretManagerImpl; import org.apache.hadoop.io.Text; +import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.ut
[hadoop] branch trunk updated (35158db7115 -> fb16e00da0e)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 35158db7115 HDFS-17023. RBF: Record proxy time when call invokeConcurrent method. (#5683). Contributed by farmmamba. add fb16e00da0e HADOOP-18718. Fix several maven build warnings (#5592). Contributed by Dongjoon Hyun. No new revisions were added by this update. Summary of changes: .../hadoop-client-check-invariants/pom.xml | 1 - .../hadoop-client-check-test-invariants/pom.xml| 1 - hadoop-common-project/hadoop-common/pom.xml| 1 - hadoop-dist/pom.xml| 3 --- hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml| 13 +--- hadoop-hdfs-project/hadoop-hdfs/pom.xml| 4 ++-- .../hadoop-mapreduce-client-core/pom.xml | 4 ++-- hadoop-project-dist/pom.xml| 1 - hadoop-tools/hadoop-benchmark/pom.xml | 7 --- hadoop-tools/hadoop-distcp/pom.xml | 24 ++ hadoop-tools/hadoop-federation-balance/pom.xml | 24 ++ .../hadoop-yarn/hadoop-yarn-common/pom.xml | 4 ++-- .../hadoop-yarn-server-common/pom.xml | 8 pom.xml| 4 14 files changed, 31 insertions(+), 68 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (7a45ef41647 -> 35158db7115)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 7a45ef41647 MAPREDUCE-7435. Manifest Committer OOM on abfs (#5519) add 35158db7115 HDFS-17023. RBF: Record proxy time when call invokeConcurrent method. (#5683). Contributed by farmmamba. No new revisions were added by this update. Summary of changes: .../metrics/FederationRPCPerformanceMonitor.java | 8 ++-- .../server/federation/router/RouterRpcClient.java | 5 - .../metrics/TestNameserviceRPCMetrics.java | 23 ++ 3 files changed, 33 insertions(+), 3 deletions(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (0c209961f82 -> ddae78b0ec7)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 0c209961f82 HDFS-17019. Optimize the logic for reconfigure slow peer enable for Namenode (#5671) add ddae78b0ec7 HDFS-17035. FsVolumeImpl#getActualNonDfsUsed may return negative value. (#5708). Contributed by farmmamba. No new revisions were added by this update. Summary of changes: .../apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated (5d6ca13c5cf -> d9980ab40ff)
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git from 5d6ca13c5cf HDFS-16983. Fix concat operation doesn't honor dfs.permissions.enabled (#5561). Contributed by caozhiqiang. add d9980ab40ff HDFS-17029. Support getECPolices API in WebHDFS (#5698). Contributed by Hualong Zhang. No new revisions were added by this update. Summary of changes: .../org/apache/hadoop/hdfs/web/JsonUtilClient.java | 32 ++ .../apache/hadoop/hdfs/web/WebHdfsFileSystem.java | 14 + .../hadoop/hdfs/web/resources/GetOpParam.java | 1 + .../web/resources/NamenodeWebHdfsMethods.java | 6 ++ .../java/org/apache/hadoop/hdfs/web/JsonUtil.java | 23 .../hadoop-hdfs/src/site/markdown/WebHDFS.md | 68 ++ .../org/apache/hadoop/hdfs/web/TestWebHDFS.java| 27 + 7 files changed, 171 insertions(+) - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
[hadoop] branch trunk updated: HDFS-16983. Fix concat operation doesn't honor dfs.permissions.enabled (#5561). Contributed by caozhiqiang.
This is an automated email from the ASF dual-hosted git repository. ayushsaxena pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/hadoop.git The following commit(s) were added to refs/heads/trunk by this push: new 5d6ca13c5cf HDFS-16983. Fix concat operation doesn't honor dfs.permissions.enabled (#5561). Contributed by caozhiqiang. 5d6ca13c5cf is described below commit 5d6ca13c5cfb661caebb78978f4d58e723f031c6 Author: caozhiqiang AuthorDate: Mon Jun 5 19:12:59 2023 +0800 HDFS-16983. Fix concat operation doesn't honor dfs.permissions.enabled (#5561). Contributed by caozhiqiang. Reviewed-by: zhangshuyan Reviewed-by: He Xiaoqiao Signed-off-by: Ayush Saxena --- .../hadoop/hdfs/server/namenode/FSDirConcatOp.java | 2 +- .../hdfs/server/namenode/TestHDFSConcat.java | 95 ++ 2 files changed, 96 insertions(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirConcatOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirConcatOp.java index ea5ac38aa86..bc6b66af446 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirConcatOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirConcatOp.java @@ -121,7 +121,7 @@ class FSDirConcatOp { for(String src : srcs) { final INodesInPath iip = fsd.resolvePath(pc, src, DirOp.WRITE); // permission check for srcs - if (pc != null) { + if (pc != null && fsd.isPermissionEnabled()) { fsd.checkPathAccess(pc, iip, FsAction.READ); // read the file fsd.checkParentAccess(pc, iip, FsAction.WRITE); // for delete } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java index 1608a84168d..f2e9ec278ee 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; @@ -43,6 +44,7 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.QuotaExceededException; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.ipc.RemoteException; +import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.LambdaTestUtils; @@ -564,4 +566,97 @@ public class TestHDFSConcat { assertEquals(1, dfs.getContentSummary(new Path(dir)).getFileCount()); } + + /** + * Verifies concat with wrong user when dfs.permissions.enabled is false. 
+ * + * @throws IOException + */ + @Test + public void testConcatPermissionEnabled() throws Exception { +Configuration conf2 = new Configuration(); +conf2.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize); +conf2.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true); +MiniDFSCluster cluster2 = new MiniDFSCluster.Builder(conf2).numDataNodes(REPL_FACTOR).build(); +try { + cluster2.waitClusterUp(); + DistributedFileSystem dfs2 = cluster2.getFileSystem(); + + String testPathDir = "/dir2"; + Path dir = new Path(testPathDir); + dfs2.mkdirs(dir); + Path trg = new Path(testPathDir, "trg"); + Path src = new Path(testPathDir, "src"); + DFSTestUtil.createFile(dfs2, trg, blockSize, REPL_FACTOR, 1); + DFSTestUtil.createFile(dfs2, src, blockSize, REPL_FACTOR, 1); + + // Check permissions with the wrong user when dfs.permissions.enabled is true. + final UserGroupInformation user = + UserGroupInformation.createUserForTesting("theDoctor", new String[] {"tardis"}); + DistributedFileSystem hdfs1 = + (DistributedFileSystem) DFSTestUtil.getFileSystemAs(user, conf2); + LambdaTestUtils.intercept(AccessControlException.class, + "Permission denied: user=theDoctor, access=WRITE", + () -> hdfs1.concat(trg, new Path[] {src})); + + conf2.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, false); + cluster2 = new MiniDFSCluster.Builder(conf2).numDataNodes(REPL_FACTOR).build(); + cluster2.waitClus