This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/orc.git
The following commit(s) were added to refs/heads/main by this push:
new d9da9bd3c ORC-1782: Upgrade Hadoop to 3.4.1
d9da9bd3c is described below
commit d9da9bd3c534fb05a698e867b44c8cad8c97e754
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Fri Oct 18 09:36:13 2024 -0700
ORC-1782: Upgrade Hadoop to 3.4.1
### What changes were proposed in this pull request?
This PR aims to upgrade Apache Hadoop to 3.4.1.
### Why are the changes needed?
To bring in the latest bug fixes.
- http://hadoop.apache.org/docs/r3.4.1/index.html
- http://hadoop.apache.org/docs/r3.4.1/hadoop-project-dist/hadoop-common/release/3.4.1/RELEASENOTES.3.4.1.html
- http://hadoop.apache.org/docs/r3.4.1/hadoop-project-dist/hadoop-common/release/3.4.1/CHANGELOG.3.4.1.html
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #2039 from dongjoon-hyun/ORC-1782.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../org/apache/orc/bench/core/impl/ChunkReadUtilTest.java | 14 ++++++++++----
java/pom.xml | 2 +-
2 files changed, 11 insertions(+), 5 deletions(-)
diff --git a/java/bench/core/src/test/org/apache/orc/bench/core/impl/ChunkReadUtilTest.java b/java/bench/core/src/test/org/apache/orc/bench/core/impl/ChunkReadUtilTest.java
index 1169998d8..709192752 100644
--- a/java/bench/core/src/test/org/apache/orc/bench/core/impl/ChunkReadUtilTest.java
+++ b/java/bench/core/src/test/org/apache/orc/bench/core/impl/ChunkReadUtilTest.java
@@ -21,6 +21,9 @@ package org.apache.orc.bench.core.impl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.VersionInfo;
+import org.apache.orc.impl.HadoopShims;
+import org.apache.orc.impl.HadoopShimsFactory;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -38,6 +41,9 @@ class ChunkReadUtilTest {
private static long fileLength;
private static final int ROW_COUNT = 524288;
private static final int COL_COUNT = 16;
+ private static final HadoopShims SHIMS = HadoopShimsFactory.get();
+ private static final boolean supportVectoredIO =
+ SHIMS.supportVectoredIO(VersionInfo.getVersion());
@BeforeAll
public static void setup() throws IOException {
@@ -57,7 +63,7 @@ class ChunkReadUtilTest {
Configuration conf = new Configuration();
readStart();
assertEquals(ROW_COUNT, ChunkReadUtil.readORCFile(filePath, conf, false));
- assertTrue((readEnd().getBytesRead() / (double) fileLength) > 1);
+ assertTrue(supportVectoredIO || (readEnd().getBytesRead() / (double) fileLength) > 1);
}
@Test
@@ -75,7 +81,7 @@ class ChunkReadUtilTest {
readStart();
assertEquals(ROW_COUNT, ChunkReadUtil.readORCFile(filePath, conf, true));
double readFraction = readEnd().getBytesRead() / (double) fileLength;
- assertTrue(readFraction > 1 && readFraction < 1.01);
+ assertTrue(supportVectoredIO || (readFraction > 1 && readFraction < 1.01));
}
@Test
@@ -85,6 +91,6 @@ class ChunkReadUtilTest {
readStart();
assertEquals(ROW_COUNT, ChunkReadUtil.readORCFile(filePath, conf, true));
double readFraction = readEnd().getBytesRead() / (double) fileLength;
- assertTrue(readFraction > 1 && readFraction < 1.01);
+ assertTrue(supportVectoredIO || (readFraction > 1 && readFraction < 1.01));
}
-}
\ No newline at end of file
+}
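Editor's note: the test change above guards the bytes-read assertions behind a vectored IO check, presumably because vectored reads can change the bytes-read accounting those assertions rely on. The following is a minimal standalone sketch of the same detection pattern; the class name VectoredIoCheck is made up for illustration, while the shim and version calls are the ones the test itself uses.

import org.apache.hadoop.util.VersionInfo;
import org.apache.orc.impl.HadoopShims;
import org.apache.orc.impl.HadoopShimsFactory;

public class VectoredIoCheck {
  public static void main(String[] args) {
    // HadoopShimsFactory.get() returns the shim matching the Hadoop version
    // available on the classpath.
    HadoopShims shims = HadoopShimsFactory.get();
    // VersionInfo.getVersion() reports the runtime Hadoop version string,
    // e.g. "3.4.1" after this upgrade.
    boolean vectored = shims.supportVectoredIO(VersionInfo.getVersion());
    System.out.println("Hadoop " + VersionInfo.getVersion()
        + " supports vectored IO: " + vectored);
  }
}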
diff --git a/java/pom.xml b/java/pom.xml
index 979e01f34..b44261cad 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -64,7 +64,7 @@
<brotli4j.version>1.17.0</brotli4j.version>
<checkstyle.version>10.18.2</checkstyle.version>
<example.dir>${project.basedir}/../../examples</example.dir>
- <hadoop.version>3.4.0</hadoop.version>
+ <hadoop.version>3.4.1</hadoop.version>
<java.version>17</java.version>
<javadoc.location>${project.basedir}/../target/javadoc</javadoc.location>
<junit.version>5.11.2</junit.version>