This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/orc.git


The following commit(s) were added to refs/heads/main by this push:
     new 30ec6f7d9 ORC-1917: Add `TestConf` interface to centralize test 
configurations
30ec6f7d9 is described below

commit 30ec6f7d93e9309a78faa8ef1c1a4b6a32ba9c35
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Mon Jun 9 23:42:58 2025 -0700

    ORC-1917: Add `TestConf` interface to centralize test configurations
    
    ### What changes were proposed in this pull request?
    
    This PR aims to add a Java interface `TestConf` to centralize ORC test 
configurations.
    
    ### Why are the changes needed?
    
    1. To reduce the repetition
    2. To provide a centralized way to add common test configurations.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #2265 from dongjoon-hyun/ORC-1917.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../test/org/apache/orc/TestColumnStatistics.java  |  5 +--
 java/core/src/test/org/apache/orc/TestConf.java    | 39 ++++++++++++++++++++++
 .../src/test/org/apache/orc/TestMinSeekSize.java   |  5 +--
 .../org/apache/orc/TestNewIntegerEncoding.java     |  5 +--
 .../test/org/apache/orc/TestOrcDSTNoTimezone.java  |  5 +--
 .../test/org/apache/orc/TestOrcFilterContext.java  | 11 +++---
 .../src/test/org/apache/orc/TestOrcNoTimezone.java |  5 +--
 .../org/apache/orc/TestOrcNullOptimization.java    |  5 +--
 .../test/org/apache/orc/TestOrcTimestampPPD.java   |  5 +--
 .../src/test/org/apache/orc/TestOrcTimezone1.java  |  5 +--
 .../src/test/org/apache/orc/TestOrcTimezone2.java  |  5 +--
 .../src/test/org/apache/orc/TestOrcTimezone3.java  |  5 +--
 .../src/test/org/apache/orc/TestOrcTimezone4.java  |  5 +--
 .../test/org/apache/orc/TestOrcTimezonePPD.java    |  5 +--
 .../orc/TestOrcWithLargeStripeStatistics.java      |  6 ++--
 .../org/apache/orc/TestProlepticConversions.java   |  5 +--
 java/core/src/test/org/apache/orc/TestReader.java  |  5 +--
 .../apache/orc/TestRowFilteringComplexTypes.java   |  5 +--
 .../orc/TestRowFilteringComplexTypesNulls.java     |  5 +--
 .../org/apache/orc/TestRowFilteringIOSkip.java     |  4 +--
 .../org/apache/orc/TestRowFilteringNoSkip.java     |  5 +--
 .../test/org/apache/orc/TestRowFilteringSkip.java  |  5 +--
 .../test/org/apache/orc/TestSelectedVector.java    |  5 +--
 .../test/org/apache/orc/TestStringDictionary.java  |  4 +--
 .../test/org/apache/orc/TestTypeDescription.java   |  4 +--
 java/core/src/test/org/apache/orc/TestUnicode.java |  5 +--
 .../test/org/apache/orc/TestUnrolledBitPack.java   |  5 +--
 .../src/test/org/apache/orc/TestVectorOrcFile.java |  5 +--
 .../src/test/org/apache/orc/impl/TestBitPack.java  |  6 ++--
 .../apache/orc/impl/TestColumnStatisticsImpl.java  |  5 ++-
 .../orc/impl/TestConvertTreeReaderFactory.java     | 13 ++------
 .../test/org/apache/orc/impl/TestCryptoUtils.java  |  6 ++--
 .../test/org/apache/orc/impl/TestEncryption.java   |  9 ++---
 .../org/apache/orc/impl/TestMemoryManager.java     |  7 ++--
 .../org/apache/orc/impl/TestOrcLargeStripe.java    |  8 ++---
 .../test/org/apache/orc/impl/TestReaderImpl.java   | 14 ++------
 .../org/apache/orc/impl/TestRecordReaderImpl.java  | 20 ++---------
 .../org/apache/orc/impl/TestSchemaEvolution.java   |  5 ++-
 .../test/org/apache/orc/impl/TestWriterImpl.java   |  8 ++---
 .../src/test/org/apache/orc/impl/TestZlib.java     |  5 ++-
 .../orc/impl/filter/TestPluginFilterService.java   | 10 +++---
 .../apache/orc/impl/filter/TestPluginFilters.java  | 10 ++----
 .../orc/util/TestStreamWrapperFileSystem.java      |  5 ++-
 43 files changed, 104 insertions(+), 205 deletions(-)

diff --git a/java/core/src/test/org/apache/orc/TestColumnStatistics.java 
b/java/core/src/test/org/apache/orc/TestColumnStatistics.java
index 12dc152ed..1cb0f90b3 100644
--- a/java/core/src/test/org/apache/orc/TestColumnStatistics.java
+++ b/java/core/src/test/org/apache/orc/TestColumnStatistics.java
@@ -20,7 +20,6 @@ package org.apache.orc;
 
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.lang3.StringEscapeUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -51,7 +50,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 /**
  * Test ColumnStatisticsImpl for ORC.
  */
-public class TestColumnStatistics {
+public class TestColumnStatistics implements TestConf {
 
   @Test
   public void testLongSumOverflow() {
@@ -746,13 +745,11 @@ public class TestColumnStatistics {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     conf.set("fs.file.impl.disable.cache", "true");
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir + File.separator +
diff --git a/java/core/src/test/org/apache/orc/TestConf.java 
b/java/core/src/test/org/apache/orc/TestConf.java
new file mode 100644
index 000000000..22e529a29
--- /dev/null
+++ b/java/core/src/test/org/apache/orc/TestConf.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.orc;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.jupiter.api.BeforeEach;
+
+/**
+ * A shared configuration for ORC tests.
+ */
+public interface TestConf {
+
+  Configuration conf = getNewConf();
+
+  @BeforeEach
+  default void clear() {
+    conf.clear();
+  }
+
+  private static Configuration getNewConf() {
+    Configuration conf = new Configuration();
+    return conf;
+  }
+}
diff --git a/java/core/src/test/org/apache/orc/TestMinSeekSize.java 
b/java/core/src/test/org/apache/orc/TestMinSeekSize.java
index 8e69bf678..0040501fc 100644
--- a/java/core/src/test/org/apache/orc/TestMinSeekSize.java
+++ b/java/core/src/test/org/apache/orc/TestMinSeekSize.java
@@ -18,7 +18,6 @@
 
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -40,13 +39,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestMinSeekSize {
+public class TestMinSeekSize implements TestConf {
   private static final Logger LOG = 
LoggerFactory.getLogger(TestMinSeekSize.class);
   private static final Path workDir = new 
Path(System.getProperty("test.tmp.dir",
                                                                   "target" + 
File.separator + "test"
                                                                   + 
File.separator + "tmp"));
   private static final Path filePath = new Path(workDir, 
"min_seek_size_file.orc");
-  private static Configuration conf;
   private static FileSystem fs;
 
   private static final TypeDescription schema = TypeDescription.createStruct()
@@ -62,7 +60,6 @@ public class TestMinSeekSize {
 
   @BeforeAll
   public static void setup() throws IOException {
-    conf = new Configuration();
     fs = FileSystem.get(conf);
 
     LOG.info("Creating file {} with schema {}", filePath, schema);
diff --git a/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java 
b/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java
index 7e1b1aa89..75508c3ad 100644
--- a/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java
+++ b/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java
@@ -19,7 +19,6 @@ package org.apache.orc;
 
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -42,7 +41,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestNewIntegerEncoding {
+public class TestNewIntegerEncoding implements TestConf {
 
   private static Stream<Arguments> data() {
     return Stream.of(
@@ -72,13 +71,11 @@ public class TestNewIntegerEncoding {
   Path workDir = new Path(System.getProperty("test.tmp.dir", "target"
       + File.separator + "test" + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile."
         + testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java 
b/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java
index eb9095d60..49529add6 100644
--- a/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java
+++ b/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -40,15 +39,13 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
  * and it was written from a time zone that observes DST for one of the 
timestamp
  * values stored ('2014-06-06 12:34:56.0').
  */
-public class TestOrcDSTNoTimezone {
-  Configuration conf;
+public class TestOrcDSTNoTimezone implements TestConf {
   FileSystem fs;
   SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S");
   static TimeZone defaultTimeZone = TimeZone.getDefault();
 
   @BeforeEach
   public void openFileSystem() throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
   }
 
diff --git a/java/core/src/test/org/apache/orc/TestOrcFilterContext.java 
b/java/core/src/test/org/apache/orc/TestOrcFilterContext.java
index 265956890..a8916a256 100644
--- a/java/core/src/test/org/apache/orc/TestOrcFilterContext.java
+++ b/java/core/src/test/org/apache/orc/TestOrcFilterContext.java
@@ -29,7 +29,6 @@ import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.orc.impl.OrcFilterContextImpl;
@@ -47,7 +46,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestOrcFilterContext {
+public class TestOrcFilterContext implements TestConf {
   private final TypeDescription schema = TypeDescription.createStruct()
     .addField("f1", TypeDescription.createLong())
     .addField("f2", TypeDescription.createString())
@@ -74,7 +73,6 @@ public class TestOrcFilterContext {
                                            
TypeDescription.createList(TypeDescription.createChar()))
                 )
     );
-  private static Configuration configuration;
   private static FileSystem fileSystem;
   private static final Path workDir = new 
Path(System.getProperty("test.tmp.dir",
           "target" + File.separator + "test"
@@ -270,11 +268,10 @@ public class TestOrcFilterContext {
   }
 
   private void createAcidORCFile() throws IOException {
-    configuration = new Configuration();
-    fileSystem = FileSystem.get(configuration);
+    fileSystem = FileSystem.get(conf);
 
     try (Writer writer = OrcFile.createWriter(filePath,
-            OrcFile.writerOptions(configuration)
+            OrcFile.writerOptions(conf)
                     .fileSystem(fileSystem)
                     .overwrite(true)
                     .rowIndexStride(8192)
@@ -325,7 +322,7 @@ public class TestOrcFilterContext {
   }
 
   private void readSingleRowWithFilter(int id) throws IOException {
-    Reader reader = OrcFile.createReader(filePath, 
OrcFile.readerOptions(configuration).filesystem(fileSystem));
+    Reader reader = OrcFile.createReader(filePath, 
OrcFile.readerOptions(conf).filesystem(fileSystem));
     SearchArgument searchArgument = SearchArgumentFactory.newBuilder()
             .in("int1", PredicateLeaf.Type.LONG, new Long(id))
             .build();
diff --git a/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java 
b/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java
index 1b72e33e1..5a5373862 100644
--- a/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java
+++ b/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -39,15 +38,13 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
  * Test over an orc file that does not store time zone information in the 
footer
  * and it was written from a time zone that does not observe DST.
  */
-public class TestOrcNoTimezone {
-  Configuration conf;
+public class TestOrcNoTimezone implements TestConf {
   FileSystem fs;
   SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S");
   static TimeZone defaultTimeZone = TimeZone.getDefault();
 
   @BeforeEach
   public void openFileSystem() throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
   }
 
diff --git a/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java 
b/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java
index b509b8a25..79473063c 100644
--- a/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java
+++ b/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java
@@ -18,7 +18,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -43,7 +42,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestOrcNullOptimization {
+public class TestOrcNullOptimization implements TestConf {
 
   TypeDescription createMyStruct() {
     return TypeDescription.createStruct()
@@ -103,13 +102,11 @@ public class TestOrcNullOptimization {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcNullOptimization." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java 
b/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java
index 0803d890b..142c7423a 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java
@@ -18,7 +18,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -40,10 +39,9 @@ import java.util.TimeZone;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class TestOrcTimestampPPD {
+public class TestOrcTimestampPPD implements TestConf {
   Path workDir =
       new Path(System.getProperty("test.tmp.dir", "target" + File.separator + 
"test" + File.separator + "tmp"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
@@ -53,7 +51,6 @@ public class TestOrcTimestampPPD {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir,
         "TestOrcTimestampPPD." + testInfo.getTestMethod().get().getName() + 
".orc");
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone1.java 
b/java/core/src/test/org/apache/orc/TestOrcTimezone1.java
index fe871b9c4..e9ccb3831 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone1.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone1.java
@@ -18,7 +18,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -43,10 +42,9 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 /**
  *
  */
-public class TestOrcTimezone1 {
+public class TestOrcTimezone1 implements TestConf {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
@@ -79,7 +77,6 @@ public class TestOrcTimezone1 {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone2.java 
b/java/core/src/test/org/apache/orc/TestOrcTimezone2.java
index 69b6d676b..488cc2d26 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone2.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone2.java
@@ -18,7 +18,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -42,10 +41,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 /**
  *
  */
-public class TestOrcTimezone2 {
+public class TestOrcTimezone2 implements TestConf {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
@@ -66,7 +64,6 @@ public class TestOrcTimezone2 {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone3.java 
b/java/core/src/test/org/apache/orc/TestOrcTimezone3.java
index 112d5dedd..f8a16b16b 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone3.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone3.java
@@ -18,7 +18,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -41,10 +40,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 /**
  *
  */
-public class TestOrcTimezone3 {
+public class TestOrcTimezone3 implements TestConf {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
@@ -55,7 +53,6 @@ public class TestOrcTimezone3 {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcTimezone3." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone4.java 
b/java/core/src/test/org/apache/orc/TestOrcTimezone4.java
index 8c06e473c..78892a926 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone4.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone4.java
@@ -18,7 +18,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -39,10 +38,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 /**
  *
  */
-public class TestOrcTimezone4 {
+public class TestOrcTimezone4 implements TestConf {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
@@ -53,7 +51,6 @@ public class TestOrcTimezone4 {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcTimezone4." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java 
b/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java
index 593e0a964..ea0af05af 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java
@@ -16,7 +16,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -54,12 +53,11 @@ import static org.junit.jupiter.api.Assertions.assertNull;
 /**
  *
  */
-public class TestOrcTimezonePPD {
+public class TestOrcTimezonePPD implements TestConf {
   private static final Logger LOG = 
LoggerFactory.getLogger(TestOrcTimezonePPD.class);
 
   Path workDir = new Path(System.getProperty("test.tmp.dir",
     "target" + File.separator + "test" + File.separator + "tmp"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
@@ -90,7 +88,6 @@ public class TestOrcTimezonePPD {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
       testInfo.getTestMethod().get().getName() + ".orc");
diff --git 
a/java/core/src/test/org/apache/orc/TestOrcWithLargeStripeStatistics.java 
b/java/core/src/test/org/apache/orc/TestOrcWithLargeStripeStatistics.java
index 30b2604bf..9f86f017e 100644
--- a/java/core/src/test/org/apache/orc/TestOrcWithLargeStripeStatistics.java
+++ b/java/core/src/test/org/apache/orc/TestOrcWithLargeStripeStatistics.java
@@ -16,7 +16,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
@@ -41,7 +40,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
  * </p>
  */
 @Disabled("ORC-1361")
-public class TestOrcWithLargeStripeStatistics {
+public class TestOrcWithLargeStripeStatistics implements TestConf {
 
   @ParameterizedTest
   @EnumSource(value = OrcFile.Version.class, mode = EnumSource.Mode.EXCLUDE, 
names = "FUTURE")
@@ -49,7 +48,7 @@ public class TestOrcWithLargeStripeStatistics {
       throws Exception {
     // Use a size that exceeds the protobuf limit (e.g., 1GB) to trigger 
protobuf exception
     Path p = createOrcFile(1024L << 20, version);
-    try (Reader reader = OrcFile.createReader(p, OrcFile.readerOptions(new 
Configuration()))) {
+    try (Reader reader = OrcFile.createReader(p, OrcFile.readerOptions(conf))) 
{
       assertTrue(reader.getStripeStatistics().isEmpty());
     }
   }
@@ -75,7 +74,6 @@ public class TestOrcWithLargeStripeStatistics {
         TestOrcWithLargeStripeStatistics.class.getSimpleName()
             + "_" + ROW_STRIPE_NUM + "_" + version + ".orc");
     // Modify defaults to force one row per stripe.
-    Configuration conf = new Configuration();
     conf.set(OrcConf.ROWS_BETWEEN_CHECKS.getAttribute(), "0");
     TypeDescription schema = createTypeDescription();
     OrcFile.WriterOptions writerOptions =
diff --git a/java/core/src/test/org/apache/orc/TestProlepticConversions.java 
b/java/core/src/test/org/apache/orc/TestProlepticConversions.java
index ff983b3c8..ae8201c60 100644
--- a/java/core/src/test/org/apache/orc/TestProlepticConversions.java
+++ b/java/core/src/test/org/apache/orc/TestProlepticConversions.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -56,7 +55,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
  * This class tests all of the combinations of reading and writing the hybrid
  * and proleptic calendars.
  */
-public class TestProlepticConversions {
+public class TestProlepticConversions implements TestConf {
 
   private static Stream<Arguments> data() {
     return Stream.of(
@@ -69,12 +68,10 @@ public class TestProlepticConversions {
   private Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
 
-  private final Configuration conf;
   private final TimeZone UTC = TimeZone.getTimeZone("UTC");
   private final GregorianCalendar PROLEPTIC = new GregorianCalendar();
   private final GregorianCalendar HYBRID = new GregorianCalendar();
   {
-    conf = new Configuration();
     PROLEPTIC.setTimeZone(UTC);
     PROLEPTIC.setGregorianChange(new Date(Long.MIN_VALUE));
     HYBRID.setTimeZone(UTC);
diff --git a/java/core/src/test/org/apache/orc/TestReader.java 
b/java/core/src/test/org/apache/orc/TestReader.java
index d4b648f5e..f3c11d54a 100644
--- a/java/core/src/test/org/apache/orc/TestReader.java
+++ b/java/core/src/test/org/apache/orc/TestReader.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -30,16 +29,14 @@ import java.io.File;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
-public class TestReader {
+public class TestReader implements TestConf {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, TestReader.class.getSimpleName() + "." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git 
a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java 
b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java
index bebe3817c..0f6b76e62 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
@@ -39,11 +38,10 @@ import java.io.File;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestRowFilteringComplexTypes {
+public class TestRowFilteringComplexTypes implements TestConf {
     private Path workDir = new Path(System.getProperty("test.tmp.dir", 
"target" + File.separator + "test"
             + File.separator + "tmp"));
 
-    private Configuration conf;
     private FileSystem fs;
     private Path testFilePath;
 
@@ -51,7 +49,6 @@ public class TestRowFilteringComplexTypes {
 
     @BeforeEach
     public void openFileSystem(TestInfo testInfo) throws Exception {
-        conf = new Configuration();
         OrcConf.READER_USE_SELECTED.setBoolean(conf, true);
         fs = FileSystem.getLocal(conf);
         testFilePath = new Path(workDir,
diff --git 
a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java 
b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java
index c45c94e16..248e6c88d 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java
@@ -18,7 +18,6 @@
 
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
@@ -47,14 +46,13 @@ import java.util.function.Consumer;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestRowFilteringComplexTypesNulls {
+public class TestRowFilteringComplexTypesNulls implements TestConf {
   private static final Logger LOG =
     LoggerFactory.getLogger(TestRowFilteringComplexTypesNulls.class);
   private static final Path workDir = new 
Path(System.getProperty("test.tmp.dir",
                                                                   "target" + 
File.separator + "test"
                                                                   + 
File.separator + "tmp"));
   private static final Path filePath = new Path(workDir, 
"complex_null_file.orc");
-  private static Configuration conf;
   private static FileSystem fs;
 
   private static final TypeDescription schema = TypeDescription.createStruct()
@@ -75,7 +73,6 @@ public class TestRowFilteringComplexTypesNulls {
 
   @BeforeAll
   public static void setup() throws IOException {
-    conf = new Configuration();
     fs = FileSystem.get(conf);
 
     LOG.info("Creating file {} with schema {}", filePath, schema);
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java 
b/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java
index d0b19a9c0..fd32a431d 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java
@@ -48,13 +48,12 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestRowFilteringIOSkip {
+public class TestRowFilteringIOSkip implements TestConf {
   private static final Logger LOG = 
LoggerFactory.getLogger(TestRowFilteringIOSkip.class);
   private static final Path workDir = new 
Path(System.getProperty("test.tmp.dir",
                                                                   "target" + 
File.separator + "test"
                                                                   + 
File.separator + "tmp"));
   private static final Path filePath = new Path(workDir, "skip_file.orc");
-  private static Configuration conf;
   private static FileSystem fs;
 
   private static final TypeDescription schema = TypeDescription.createStruct()
@@ -71,7 +70,6 @@ public class TestRowFilteringIOSkip {
 
   @BeforeAll
   public static void setup() throws IOException {
-    conf = new Configuration();
     fs = FileSystem.get(conf);
 
     LOG.info("Creating file {} with schema {}", filePath, schema);
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java 
b/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
index 87c390e8a..b4a677d86 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -42,12 +41,11 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 * As it turns out it is more expensive to skip non-selected rows rather than 
just decode all and propagate the
  * selected array. Skipping for these type breaks instruction pipelining and 
introduces more branch mispredictions.
  */
-public class TestRowFilteringNoSkip {
+public class TestRowFilteringNoSkip implements TestConf {
 
   private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" 
+ File.separator + "test"
       + File.separator + "tmp"));
 
-  private Configuration conf;
   private FileSystem fs;
   private Path testFilePath;
 
@@ -55,7 +53,6 @@ public class TestRowFilteringNoSkip {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     OrcConf.READER_USE_SELECTED.setBoolean(conf, true);
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestRowFilteringNoSkip." +
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java 
b/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
index dafbd35d3..ea4bc583c 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -53,12 +52,11 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
  * Types that are skipped at row-level include: Decimal, Decimal64, Double, 
Float, Char, VarChar, String, Boolean, Timestamp
  * For the remaining types that are not row-skipped see {@link 
TestRowFilteringNoSkip}
  */
-public class TestRowFilteringSkip {
+public class TestRowFilteringSkip implements TestConf {
 
   private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" 
+ File.separator + "test"
       + File.separator + "tmp"));
 
-  private Configuration conf;
   private FileSystem fs;
   private Path testFilePath;
 
@@ -66,7 +64,6 @@ public class TestRowFilteringSkip {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     OrcConf.READER_USE_SELECTED.setBoolean(conf, true);
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestRowFilteringSkip." +
diff --git a/java/core/src/test/org/apache/orc/TestSelectedVector.java 
b/java/core/src/test/org/apache/orc/TestSelectedVector.java
index 3e2e4750f..b1accd78a 100644
--- a/java/core/src/test/org/apache/orc/TestSelectedVector.java
+++ b/java/core/src/test/org/apache/orc/TestSelectedVector.java
@@ -18,7 +18,6 @@
 
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -49,17 +48,15 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestSelectedVector {
+public class TestSelectedVector implements TestConf {
 
   Path workDir = new Path(System.getProperty("test.tmp.dir"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   Random random = new Random();
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     conf.setInt(OrcConf.ROW_INDEX_STRIDE.getAttribute(), 
VectorizedRowBatch.DEFAULT_SIZE);
     fs = FileSystem.getLocal(conf);
     fs.setWorkingDirectory(workDir);
diff --git a/java/core/src/test/org/apache/orc/TestStringDictionary.java 
b/java/core/src/test/org/apache/orc/TestStringDictionary.java
index a7a1d714c..9f3d4eb11 100644
--- a/java/core/src/test/org/apache/orc/TestStringDictionary.java
+++ b/java/core/src/test/org/apache/orc/TestStringDictionary.java
@@ -51,18 +51,16 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 
-public class TestStringDictionary {
+public class TestStringDictionary implements TestConf {
 
   private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" 
+ File.separator + "test"
       + File.separator + "tmp"));
 
-  private Configuration conf;
   private FileSystem fs;
   private Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestStringDictionary." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestTypeDescription.java 
b/java/core/src/test/org/apache/orc/TestTypeDescription.java
index 7dba23a9f..3f811803f 100644
--- a/java/core/src/test/org/apache/orc/TestTypeDescription.java
+++ b/java/core/src/test/org/apache/orc/TestTypeDescription.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.junit.jupiter.api.Test;
 
@@ -33,7 +32,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
-public class TestTypeDescription {
+public class TestTypeDescription implements TestConf {
   @Test
   public void testJson() {
     TypeDescription bin = TypeDescription.createBinary();
@@ -369,7 +368,6 @@ public class TestTypeDescription {
     // write a file with those attributes
     Path path = new Path(System.getProperty("test.tmp.dir",
         "target" + File.separator + "test" + File.separator + "tmp"), 
"attribute.orc");
-    Configuration conf = new Configuration();
     Writer writer = OrcFile.createWriter(path,
         OrcFile.writerOptions(conf).setSchema(schema).overwrite(true));
     writer.close();
diff --git a/java/core/src/test/org/apache/orc/TestUnicode.java 
b/java/core/src/test/org/apache/orc/TestUnicode.java
index 370664431..c901ec72f 100644
--- a/java/core/src/test/org/apache/orc/TestUnicode.java
+++ b/java/core/src/test/org/apache/orc/TestUnicode.java
@@ -18,7 +18,6 @@
 package org.apache.orc;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -36,11 +35,10 @@ import java.util.stream.Stream;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class TestUnicode {
+public class TestUnicode implements TestConf {
   Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + 
File.separator + "test"
       + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
@@ -66,7 +64,6 @@ public class TestUnicode {
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java 
b/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java
index d30fc98df..7735b59a0 100644
--- a/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java
+++ b/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java
@@ -20,7 +20,6 @@ package org.apache.orc;
 
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -37,7 +36,7 @@ import java.util.stream.Stream;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class TestUnrolledBitPack {
+public class TestUnrolledBitPack implements TestConf {
 
   private static Stream<Arguments> data() {
     return Stream.of(
@@ -57,13 +56,11 @@ public class TestUnrolledBitPack {
   Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + 
File.separator + "test"
       + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git a/java/core/src/test/org/apache/orc/TestVectorOrcFile.java 
b/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
index c24514f69..76681f462 100644
--- a/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
+++ b/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
@@ -19,7 +19,6 @@
 package org.apache.orc;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
@@ -102,7 +101,7 @@ import static org.junit.jupiter.api.Assumptions.assumeTrue;
 /**
  * Tests for the vectorized reader and writer for ORC files.
  */
-public class TestVectorOrcFile {
+public class TestVectorOrcFile implements TestConf {
 
   private static Stream<Arguments> data() {
     return Stream.of(
@@ -193,13 +192,11 @@ public class TestVectorOrcFile {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestVectorOrcFile." +
         testInfo.getTestMethod().get().getName().replaceFirst("\\[[0-9]+\\]", 
"")
diff --git a/java/core/src/test/org/apache/orc/impl/TestBitPack.java 
b/java/core/src/test/org/apache/orc/impl/TestBitPack.java
index 53ac1ce4b..e2e1a67c8 100644
--- a/java/core/src/test/org/apache/orc/impl/TestBitPack.java
+++ b/java/core/src/test/org/apache/orc/impl/TestBitPack.java
@@ -18,10 +18,10 @@
 package org.apache.orc.impl;
 
 import com.google.common.primitives.Longs;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.orc.impl.writer.StreamOptions;
+import org.apache.orc.TestConf;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestInfo;
@@ -35,20 +35,18 @@ import java.util.Random;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class TestBitPack {
+public class TestBitPack implements TestConf {
 
   private static final int SIZE = 100;
   private static Random rand = new Random(100);
   Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + 
File.separator + "test"
       + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
         testInfo.getTestMethod().get().getName() + ".orc");
diff --git 
a/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java 
b/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java
index 54d5ac143..f16d042fd 100644
--- a/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java
@@ -18,13 +18,13 @@
 
 package org.apache.orc.impl;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.orc.DecimalColumnStatistics;
 import org.apache.orc.OrcFile;
 import org.apache.orc.OrcProto;
 import org.apache.orc.Reader;
+import org.apache.orc.TestConf;
 import org.apache.orc.TimestampColumnStatistics;
 import org.apache.orc.TypeDescription;
 import org.junit.jupiter.api.Test;
@@ -37,7 +37,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestColumnStatisticsImpl {
+public class TestColumnStatisticsImpl implements TestConf {
 
   @Test
   public void testUpdateDate() {
@@ -78,7 +78,6 @@ public class TestColumnStatisticsImpl {
     TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
     Path exampleDir = new Path(System.getProperty("example.dir"));
     Path file = new Path(exampleDir, "TestOrcFile.testTimestamp.orc");
-    Configuration conf = new Configuration();
     Reader reader = OrcFile.createReader(file, OrcFile.readerOptions(conf));
     TimestampColumnStatistics stats =
         (TimestampColumnStatistics) reader.getStatistics()[0];
diff --git 
a/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java 
b/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java
index 860b18aa7..53f94cbf7 100644
--- a/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java
+++ b/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java
@@ -36,6 +36,7 @@ import org.apache.orc.OrcFile;
 import org.apache.orc.OrcFile.WriterOptions;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
+import org.apache.orc.TestConf;
 import org.apache.orc.TestProlepticConversions;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
@@ -58,12 +59,11 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.mock;
 
-public class TestConvertTreeReaderFactory {
+public class TestConvertTreeReaderFactory implements TestConf {
 
   private Path workDir =
       new Path(System.getProperty("test.tmp.dir", "target" + File.separator + 
"test" + File.separator + "tmp"));
 
-  private Configuration conf;
   private FileSystem fs;
   private Path testFilePath;
   private int LARGE_BATCH_SIZE;
@@ -74,7 +74,6 @@ public class TestConvertTreeReaderFactory {
   public void setupPath(TestInfo testInfo) throws Exception {
     // Default CV length is 1024
     this.LARGE_BATCH_SIZE = 1030;
-    this.conf = new Configuration();
     this.fs = FileSystem.getLocal(conf);
     this.testFilePath = new Path(workDir, TestWriterImpl.class.getSimpleName() 
+
         testInfo.getTestMethod().get().getName().replaceFirst("\\[[0-9]+]", 
"") +
@@ -85,7 +84,6 @@ public class TestConvertTreeReaderFactory {
   public <TExpectedColumnVector extends ColumnVector> TExpectedColumnVector 
createORCFileWithLargeArray(
       TypeDescription schema, Class<TExpectedColumnVector> expectedColumnType, 
boolean useDecimal64)
       throws IOException, ParseException {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     fs.setWorkingDirectory(workDir);
     Writer w = OrcFile.createWriter(testFilePath, 
OrcFile.writerOptions(conf).setSchema(schema));
@@ -115,7 +113,6 @@ public class TestConvertTreeReaderFactory {
   public <TExpectedColumnVector extends ColumnVector> TExpectedColumnVector 
createORCFileWithBatchesOfIncreasingSizeInDifferentStripes(
       TypeDescription schema, Class<TExpectedColumnVector> typeClass, boolean 
useDecimal64)
       throws IOException, ParseException {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     fs.setWorkingDirectory(workDir);
     WriterOptions options = OrcFile.writerOptions(conf);
@@ -178,8 +175,6 @@ public class TestConvertTreeReaderFactory {
     options.schema(schema);
     String expected = options.toString();
 
-    Configuration conf = new Configuration();
-
     Reader reader = OrcFile.createReader(testFilePath, 
OrcFile.readerOptions(conf));
     RecordReader rows = reader.rows(options);
     VectorizedRowBatch batch = schema.createRowBatchV2();
@@ -200,8 +195,6 @@ public class TestConvertTreeReaderFactory {
     options.schema(schema);
     String expected = options.toString();
 
-    Configuration conf = new Configuration();
-
     Reader reader = OrcFile.createReader(testFilePath, 
OrcFile.readerOptions(conf));
     RecordReader rows = reader.rows(options);
     VectorizedRowBatch batch = schema.createRowBatchV2();
@@ -693,8 +686,6 @@ public class TestConvertTreeReaderFactory {
     options.schema(schema);
     String expected = options.toString();
 
-    Configuration conf = new Configuration();
-
     Reader reader = OrcFile.createReader(testFilePath, 
OrcFile.readerOptions(conf));
     RecordReader rows = reader.rows(options);
     VectorizedRowBatch batch = schema.createRowBatch();
diff --git a/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java 
b/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java
index 73d7231e6..9c2d89110 100644
--- a/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java
+++ b/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java
@@ -18,12 +18,12 @@
 
 package org.apache.orc.impl;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.orc.EncryptionAlgorithm;
 import org.apache.orc.InMemoryKeystore;
 import org.apache.orc.OrcConf;
 import org.apache.orc.OrcProto;
+import org.apache.orc.TestConf;
 import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
@@ -35,7 +35,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestCryptoUtils {
+public class TestCryptoUtils implements TestConf {
 
   @Test
   public void testCreateStreamIv() throws Exception {
@@ -56,7 +56,6 @@ public class TestCryptoUtils {
 
   @Test
   public void testMemoryKeyProvider() throws IOException {
-    Configuration conf = new Configuration();
     OrcConf.KEY_PROVIDER.setString(conf, "memory");
     // Hard code the random so that we know the bytes that will come out.
     InMemoryKeystore provider =
@@ -91,7 +90,6 @@ public class TestCryptoUtils {
 
   @Test
   public void testInvalidKeyProvider() throws IOException {
-    Configuration conf = new Configuration();
     OrcConf.KEY_PROVIDER.setString(conf, "");
     assertNull(CryptoUtils.getKeyProvider(conf, new Random()));
   }
diff --git a/java/core/src/test/org/apache/orc/impl/TestEncryption.java 
b/java/core/src/test/org/apache/orc/impl/TestEncryption.java
index 64fcbcf92..3ab2bb8b7 100644
--- a/java/core/src/test/org/apache/orc/impl/TestEncryption.java
+++ b/java/core/src/test/org/apache/orc/impl/TestEncryption.java
@@ -17,7 +17,6 @@
  */
 package org.apache.orc.impl;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -32,6 +31,7 @@ import org.apache.orc.OrcConf;
 import org.apache.orc.OrcFile;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
+import org.apache.orc.TestConf;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
 import org.junit.jupiter.api.AfterEach;
@@ -43,10 +43,9 @@ import java.nio.charset.StandardCharsets;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class TestEncryption {
+public class TestEncryption implements TestConf {
 
   Path workDir = new Path(System.getProperty("test.tmp.dir"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   TypeDescription schema;
@@ -56,11 +55,9 @@ public class TestEncryption {
 
   @BeforeEach
   public void openFileSystem() throws Exception {
-    conf = new Configuration();
     conf.setInt(OrcConf.ROW_INDEX_STRIDE.getAttribute(), 
VectorizedRowBatch.DEFAULT_SIZE);
     fs = FileSystem.getLocal(conf);
-    fs.setWorkingDirectory(workDir);
-    testFilePath = new Path("testWriterImpl.orc");
+    testFilePath = new Path(workDir, "TestEncryption.orc");
     fs.create(testFilePath, true);
     schema = TypeDescription.fromString("struct<id:int,name:string>");
     byte[] kmsKey = "secret123".getBytes(StandardCharsets.UTF_8);
diff --git a/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java 
b/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java
index 247f615a4..7f1f8359d 100644
--- a/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java
+++ b/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java
@@ -17,9 +17,9 @@
  */
 package org.apache.orc.impl;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.orc.MemoryManager;
+import org.apache.orc.TestConf;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
@@ -32,7 +32,7 @@ import static org.mockito.ArgumentMatchers.eq;
 /**
  * Test the ORC memory manager.
  */
-public class TestMemoryManager {
+public class TestMemoryManager implements TestConf {
   private static final double ERROR = 0.000001;
 
   private static class NullCallback implements MemoryManagerImpl.Callback {
@@ -43,7 +43,6 @@ public class TestMemoryManager {
 
   @Test
   public void testBasics() throws Exception {
-    Configuration conf = new Configuration();
     MemoryManagerImpl mgr = new MemoryManagerImpl(conf);
     NullCallback callback = new NullCallback();
     long poolSize = mgr.getTotalMemoryPool();
@@ -71,7 +70,6 @@ public class TestMemoryManager {
 
   @Test
   public void testConfig() throws Exception {
-    Configuration conf = new Configuration();
     conf.set("hive.exec.orc.memory.pool", "0.9");
     MemoryManagerImpl mgr = new MemoryManagerImpl(conf);
     long mem =
@@ -84,7 +82,6 @@ public class TestMemoryManager {
 
   @Test
   public void testCallback() throws Exception {
-    Configuration conf = new Configuration();
     MemoryManagerImpl mgr = new MemoryManagerImpl(conf);
     long pool = mgr.getTotalMemoryPool();
     MemoryManager.Callback[] calls = new MemoryManager.Callback[20];
diff --git a/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java 
b/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java
index 54463a079..22ae8226f 100644
--- a/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java
+++ b/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java
@@ -15,7 +15,6 @@
  */
 package org.apache.orc.impl;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -26,6 +25,7 @@ import org.apache.orc.OrcConf;
 import org.apache.orc.OrcFile;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
+import org.apache.orc.TestConf;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
 import org.junit.jupiter.api.BeforeEach;
@@ -56,18 +56,16 @@ import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
 @ExtendWith(MockitoExtension.class)
-public class TestOrcLargeStripe {
+public class TestOrcLargeStripe implements TestConf {
 
   private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" 
+ File.separator + "test"
       + File.separator + "tmp"));
 
-  Configuration conf;
   FileSystem fs;
   private Path testFilePath;
 
   @BeforeEach
   public void openFileSystem(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
         testInfo.getTestMethod().get().getName() + ".orc");
@@ -136,7 +134,6 @@ public class TestOrcLargeStripe {
 
   @Test
   public void testConfigMaxChunkLimit() throws IOException {
-    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
     TypeDescription schema = TypeDescription.createTimestamp();
     fs.delete(testFilePath, false);
@@ -151,7 +148,6 @@ public class TestOrcLargeStripe {
     assertTrue(recordReader instanceof RecordReaderImpl);
     assertEquals(Integer.MAX_VALUE - 1024, ((RecordReaderImpl) 
recordReader).getMaxDiskRangeChunkLimit());
 
-    conf = new Configuration();
     conf.setInt(OrcConf.ORC_MAX_DISK_RANGE_CHUNK_LIMIT.getHiveConfName(), 
1000);
     opts = OrcFile.readerOptions(conf);
     reader = OrcFile.createReader(testFilePath, opts);
diff --git a/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java 
b/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java
index e343b8f42..003ae22a7 100644
--- a/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java
@@ -39,6 +39,7 @@ import org.apache.orc.OrcUtils;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
 import org.apache.orc.StripeStatistics;
+import org.apache.orc.TestConf;
 import org.apache.orc.TestVectorOrcFile;
 import org.apache.orc.TypeDescription;
 import org.junit.jupiter.api.BeforeEach;
@@ -60,7 +61,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestReaderImpl {
+public class TestReaderImpl implements TestConf {
   private Path workDir = new Path(System.getProperty("example.dir",
       "../../examples/"));
 
@@ -106,7 +107,6 @@ public class TestReaderImpl {
   public void testOptionSafety() throws IOException {
     Reader.Options options = new Reader.Options();
     String expected = options.toString();
-    Configuration conf = new Configuration();
     Path path = new Path(TestVectorOrcFile.getFileFromClasspath
         ("orc-file-11-format.orc"));
     try (Reader reader = OrcFile.createReader(path, 
OrcFile.readerOptions(conf));
@@ -310,7 +310,6 @@ public class TestReaderImpl {
 
   @Test
   public void testClosingRowsFirst() throws Exception {
-    Configuration conf = new Configuration();
     MockFileSystem fs = new MockFileSystem(conf);
     Reader reader = OrcFile.createReader(new Path("/foo"),
         OrcFile.readerOptions(conf).filesystem(fs));
@@ -329,7 +328,6 @@ public class TestReaderImpl {
 
   @Test
   public void testClosingReaderFirst() throws Exception {
-    Configuration conf = new Configuration();
     MockFileSystem fs = new MockFileSystem(conf);
     Reader reader = OrcFile.createReader(new Path("/foo"),
         OrcFile.readerOptions(conf).filesystem(fs));
@@ -344,7 +342,6 @@ public class TestReaderImpl {
 
   @Test
   public void testClosingMultiple() throws Exception {
-    Configuration conf = new Configuration();
     MockFileSystem fs = new MockFileSystem(conf);
     Reader reader = OrcFile.createReader(new Path("/foo"),
         OrcFile.readerOptions(conf).filesystem(fs));
@@ -359,7 +356,6 @@ public class TestReaderImpl {
 
   @Test
   public void testOrcTailStripeStats() throws Exception {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "orc_split_elim_new.orc");
     FileSystem fs = path.getFileSystem(conf);
     try (ReaderImpl reader = (ReaderImpl) OrcFile.createReader(path,
@@ -398,7 +394,6 @@ public class TestReaderImpl {
 
   @Test
   public void testGetRawDataSizeFromColIndices() throws Exception {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "orc_split_elim_new.orc");
     FileSystem fs = path.getFileSystem(conf);
     try (ReaderImpl reader = (ReaderImpl) OrcFile.createReader(path,
@@ -420,7 +415,6 @@ public class TestReaderImpl {
 
   private void CheckFileWithSargs(String fileName, String softwareVersion)
       throws IOException {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, fileName);
     FileSystem fs = path.getFileSystem(conf);
     try (ReaderImpl reader = (ReaderImpl) OrcFile.createReader(path,
@@ -450,7 +444,6 @@ public class TestReaderImpl {
 
   @Test
   public void testReadDecimalV2File() throws IOException {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "decimal64_v2_cplusplus.orc");
     FileSystem fs = path.getFileSystem(conf);
     try (ReaderImpl reader = (ReaderImpl) OrcFile.createReader(path,
@@ -489,7 +482,6 @@ public class TestReaderImpl {
 
   @Test
   public void testExtractFileTailIndexOutOfBoundsException() throws Exception {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "demo-11-none.orc");
     FileSystem fs = path.getFileSystem(conf);
     FileStatus fileStatus = fs.getFileStatus(path);
@@ -508,7 +500,6 @@ public class TestReaderImpl {
 
   @Test
   public void testWithoutCompressionBlockSize() throws IOException {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, 
"TestOrcFile.testWithoutCompressionBlockSize.orc");
     FileSystem fs = path.getFileSystem(conf);
     try (ReaderImpl reader = (ReaderImpl) OrcFile.createReader(path,
@@ -530,7 +521,6 @@ public class TestReaderImpl {
 
   @Test
   public void testSargSkipPickupGroupWithoutIndex() throws IOException {
-    Configuration conf = new Configuration();
     // We use ORC files in two languages to test, the previous Java version 
could not work
     // well when orc.row.index.stride > 0 and orc.create.index=false, now it 
can skip these row groups.
     Path[] paths = new Path[] {
diff --git a/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java 
b/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
index 378f0fcda..f785e6e58 100644
--- a/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
@@ -49,6 +49,7 @@ import org.apache.orc.OrcFile;
 import org.apache.orc.OrcProto;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
+import org.apache.orc.TestConf;
 import org.apache.orc.TestVectorOrcFile;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
@@ -103,7 +104,7 @@ import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
-public class TestRecordReaderImpl {
+public class TestRecordReaderImpl implements TestConf {
 
   // This is a work around until we update storage-api to allow 
ChronoLocalDate in
   // predicates.
@@ -114,7 +115,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testFindColumn() throws Exception {
-    Configuration conf = new Configuration();
     TypeDescription file = 
TypeDescription.fromString("struct<a:int,c:string,e:int>");
     TypeDescription reader = 
TypeDescription.fromString("struct<a:int,b:double,c:string,d:double,e:bigint>");
     SchemaEvolution evo = new SchemaEvolution(file, reader, new 
Reader.Options(conf));
@@ -127,7 +127,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testFindColumnCaseInsensitively() throws Exception {
-    Configuration conf = new Configuration();
     TypeDescription file = TypeDescription.fromString("struct<A:int>");
     TypeDescription reader = TypeDescription.fromString("struct<a:int>");
     conf.setBoolean("orc.schema.evolution.case.sensitive", false);
@@ -137,8 +136,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testForcePositionalEvolution() throws Exception {
-    Configuration conf = new Configuration();
-
     Path oldFilePath = new 
Path(TestVectorOrcFile.getFileFromClasspath("orc-file-11-format.orc"));
     Reader reader = OrcFile.createReader(oldFilePath,
         OrcFile.readerOptions(conf).filesystem(FileSystem.getLocal(conf)));
@@ -264,7 +261,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testMaxLengthToReader() throws Exception {
-    Configuration conf = new Configuration();
     OrcProto.Type rowType = OrcProto.Type.newBuilder()
         .setKind(OrcProto.Type.Kind.STRUCT).build();
     OrcProto.Footer footer = OrcProto.Footer.newBuilder()
@@ -1976,7 +1972,6 @@ public class TestRecordReaderImpl {
       "target" + File.separator + "test" + File.separator + "tmp"));
 
   private void closeMockedRecordReader(DataReader mockedDataReader) throws 
IOException {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "empty.orc");
     FileSystem.get(conf).delete(path, true);
     Writer writer = OrcFile.createWriter(path, OrcFile.writerOptions(conf)
@@ -2253,7 +2248,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testPickRowGroups() throws Exception {
-    Configuration conf = new Configuration();
     TypeDescription schema = TypeDescription.fromString("struct<x:int,y:int>");
     SchemaEvolution evolution = new SchemaEvolution(schema, schema,
         new Reader.Options(conf));
@@ -2302,7 +2296,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testPickRowGroupsError() throws Exception {
-    Configuration conf = new Configuration();
     TypeDescription schema = TypeDescription.fromString("struct<x:int,y:int>");
     SchemaEvolution evolution = new SchemaEvolution(schema, schema,
         new Reader.Options(conf));
@@ -2399,7 +2392,6 @@ public class TestRecordReaderImpl {
     when(mockedDataReader.clone()).thenReturn(mockedDataReader);
     doNothing().when(mockedDataReader).close();
 
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "empty.orc");
     FileSystem.get(conf).delete(path, true);
     OrcFile.WriterOptions options = 
OrcFile.writerOptions(conf).setSchema(TypeDescription.createLong());
@@ -2414,7 +2406,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testCloseAtConstructorException() throws Exception {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "oneRow.orc");
     FileSystem.get(conf).delete(path, true);
 
@@ -2445,7 +2436,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testSargApplier() throws Exception {
-    Configuration conf = new Configuration();
     TypeDescription schema = TypeDescription.createLong();
     SearchArgument sarg = SearchArgumentFactory.newBuilder().build();
     SchemaEvolution evo = new SchemaEvolution(schema, schema, new 
Reader.Options(conf));
@@ -2481,7 +2471,6 @@ public class TestRecordReaderImpl {
   @Test
   public void testStatisticsWithNoWrites() throws Exception {
     Path testFilePath = new Path(workDir, "rowIndexStrideNegative.orc");
-    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.get(conf);
     fs.delete(testFilePath, true);
 
@@ -2537,7 +2526,6 @@ public class TestRecordReaderImpl {
     Path filePath = new 
Path(ClassLoader.getSystemResource("orc-file-no-double-statistic.orc")
         .getPath());
 
-    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.get(conf);
 
     Reader reader = OrcFile.createReader(filePath,
@@ -2627,7 +2615,6 @@ public class TestRecordReaderImpl {
   }
 
   private void testSmallCompressionSizeOrc(int compressionSize) throws 
IOException {
-    Configuration conf = new Configuration();
     Path path = new Path(workDir, "smallCompressionSize.orc");
     FileSystem.get(conf).delete(path, true);
 
@@ -2673,7 +2660,6 @@ public class TestRecordReaderImpl {
   @Test
   public void testRowIndexStrideNegativeFilter() throws Exception {
     Path testFilePath = new Path(workDir, "rowIndexStrideNegative.orc");
-    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.get(conf);
     fs.delete(testFilePath, true);
 
@@ -2716,7 +2702,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public void testHadoopVectoredIO() throws Exception {
-    Configuration conf = new Configuration();
     Path filePath = new 
Path(TestVectorOrcFile.getFileFromClasspath("orc-file-11-format.orc"));
 
     FileSystem localFileSystem = FileSystem.getLocal(conf);
@@ -2736,7 +2721,6 @@ public class TestRecordReaderImpl {
 
   @Test
   public  void testDecimalIsRepeatingFlag() throws IOException {
-    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.get(conf);
     Path testFilePath = new Path(workDir, "testDecimalIsRepeatingFlag.orc");
     fs.delete(testFilePath, true);
diff --git a/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java 
b/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java
index 3a82fb5f2..dc7cc4ba2 100644
--- a/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java
+++ b/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java
@@ -32,6 +32,7 @@ import org.apache.orc.OrcFile;
 import org.apache.orc.OrcProto;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
+import org.apache.orc.TestConf;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
 import org.apache.orc.impl.reader.ReaderEncryption;
@@ -62,9 +63,8 @@ import static org.junit.jupiter.api.Assertions.assertSame;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestSchemaEvolution {
+public class TestSchemaEvolution implements TestConf {
 
-  Configuration conf;
   Reader.Options options;
   Path testFilePath;
   FileSystem fs;
@@ -73,7 +73,6 @@ public class TestSchemaEvolution {
 
   @BeforeEach
   public void setup(TestInfo testInfo) throws Exception {
-    conf = new Configuration();
     options = new Reader.Options(conf);
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
diff --git a/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java 
b/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java
index e5d2616cc..903e4e80c 100644
--- a/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java
@@ -19,7 +19,6 @@
 package org.apache.orc.impl;
 
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -40,20 +39,17 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 
-public class TestWriterImpl {
+public class TestWriterImpl implements TestConf {
 
   Path workDir = new Path(System.getProperty("test.tmp.dir"));
-  Configuration conf;
   FileSystem fs;
   Path testFilePath;
   TypeDescription schema;
 
   @BeforeEach
   public void openFileSystem() throws Exception {
-    conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    fs.setWorkingDirectory(workDir);
-    testFilePath = new Path("testWriterImpl.orc");
+    testFilePath = new Path(workDir, "testWriterImpl.orc");
     fs.create(testFilePath, true);
     schema = TypeDescription.fromString("struct<x:int,y:int>");
   }
diff --git a/java/core/src/test/org/apache/orc/impl/TestZlib.java 
b/java/core/src/test/org/apache/orc/impl/TestZlib.java
index 6e940923e..f9d5936bc 100644
--- a/java/core/src/test/org/apache/orc/impl/TestZlib.java
+++ b/java/core/src/test/org/apache/orc/impl/TestZlib.java
@@ -18,13 +18,13 @@
 
 package org.apache.orc.impl;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.orc.CompressionCodec;
 import org.apache.orc.OrcFile;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
+import org.apache.orc.TestConf;
 import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
@@ -35,7 +35,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
-public class TestZlib {
+public class TestZlib implements TestConf {
 
   @Test
   public void testNoOverflow() throws Exception {
@@ -65,7 +65,6 @@ public class TestZlib {
 
   @Test
   public void testCorruptZlibFile() {
-    Configuration conf = new Configuration();
     Path testFilePath = new Path(ClassLoader.
         getSystemResource("orc_corrupt_zlib.orc").getPath());
 
diff --git 
a/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilterService.java 
b/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilterService.java
index 923910ded..861cafa0e 100644
--- a/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilterService.java
+++ b/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilterService.java
@@ -20,6 +20,8 @@ package org.apache.orc.impl.filter;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.orc.filter.BatchFilter;
+import org.apache.orc.TestConf;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
 import java.lang.reflect.Method;
@@ -31,11 +33,9 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class TestPluginFilterService {
-  private final Configuration conf;
-
-  public TestPluginFilterService() {
-    conf = new Configuration();
+public class TestPluginFilterService implements TestConf {
+  @BeforeEach
+  public void addFilter() {
     conf.set("my.filter.col.name", "f2");
     conf.set("my.filter.col.value", "aBcd");
     conf.set("my.filter.scope", "file://db/table1/.*");
diff --git 
a/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilters.java 
b/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilters.java
index 85ec869db..d08416de3 100644
--- a/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilters.java
+++ b/java/core/src/test/org/apache/orc/impl/filter/TestPluginFilters.java
@@ -18,13 +18,13 @@
 
 package org.apache.orc.impl.filter;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.orc.OrcConf;
 import org.apache.orc.OrcFile;
 import org.apache.orc.Reader;
+import org.apache.orc.TestConf;
 import org.apache.orc.filter.BatchFilter;
 import org.junit.jupiter.api.Test;
 
@@ -32,7 +32,7 @@ import static 
org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-public class TestPluginFilters extends ATestFilter {
+public class TestPluginFilters extends ATestFilter implements TestConf {
 
   @Test
   public void testPluginFilterWithSArg() {
@@ -40,7 +40,6 @@ public class TestPluginFilters extends ATestFilter {
              new String[] {"a", "B", "c", "dE", "e", "f"});
 
     // Define the plugin filter
-    Configuration conf = new Configuration();
     OrcConf.ALLOW_PLUGIN_FILTER.setBoolean(conf, true);
     conf.set("my.filter.name", "my_str_i_eq");
     conf.set("my.filter.col.name", "f2");
@@ -75,7 +74,6 @@ public class TestPluginFilters extends ATestFilter {
              new String[] {"a", "B", "c", "dE", "e", "f"});
 
     // Define the plugin filter
-    Configuration conf = new Configuration();
     OrcConf.ALLOW_PLUGIN_FILTER.setBoolean(conf, true);
     conf.set("my.filter.name", "my_str_i_eq");
     conf.set("my.filter.col.name", "f2");
@@ -109,7 +107,6 @@ public class TestPluginFilters extends ATestFilter {
              new String[] {"a", "B", "c", "dE", "e", "f"});
 
     // Define the plugin filter
-    Configuration conf = new Configuration();
     OrcConf.ALLOW_PLUGIN_FILTER.setBoolean(conf, false);
     conf.set("my.filter.name", "my_str_i_eq");
     conf.set("my.filter.col.name", "f2");
@@ -143,7 +140,6 @@ public class TestPluginFilters extends ATestFilter {
              new String[] {"a", "B", "c", "dE", "e", "f"});
 
     // Define the plugin filter
-    Configuration conf = new Configuration();
     OrcConf.ALLOW_PLUGIN_FILTER.setBoolean(conf, true);
     conf.set("my.filter.name", "my_str_i_eq");
     conf.set("my.filter.col.name", "f2");
@@ -177,7 +173,6 @@ public class TestPluginFilters extends ATestFilter {
              new String[] {"abcdef", "Abcdef", "aBcdef", null, "abcDef", 
"abcdEf"});
 
     // Define the plugin filter
-    Configuration conf = new Configuration();
     OrcConf.ALLOW_PLUGIN_FILTER.setBoolean(conf, true);
     conf.set("my.filter.name", "my_str_i_eq");
     conf.set("my.filter.col.name", "f2");
@@ -211,7 +206,6 @@ public class TestPluginFilters extends ATestFilter {
              new String[] {"abcdef", "Abcdef", "aBcdef", null, "abcDef", 
"abcdEf"});
 
     // Define the plugin filter
-    Configuration conf = new Configuration();
     OrcConf.ALLOW_PLUGIN_FILTER.setBoolean(conf, true);
     conf.set("my.filter.name", "my_str_i_eq");
     conf.set("my.filter.col.name", "f2");
diff --git 
a/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java 
b/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java
index f53b8f415..f04ccec8d 100644
--- a/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java
+++ b/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java
@@ -18,7 +18,6 @@
 
 package org.apache.orc.util;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -27,6 +26,7 @@ import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.orc.OrcFile;
 import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
+import org.apache.orc.TestConf;
 import org.apache.orc.TestVectorOrcFile;
 import org.apache.orc.TypeDescription;
 import org.junit.jupiter.api.Test;
@@ -40,11 +40,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 /**
  * Tests for StreamWrapperFileSystem.
  */
-public class TestStreamWrapperFileSystem {
+public class TestStreamWrapperFileSystem implements TestConf {
 
   @Test
   public void testWrapper() throws IOException {
-    Configuration conf = new Configuration();
     Path realFilename = new Path(TestVectorOrcFile.getFileFromClasspath(
         "orc-file-11-format.orc"));
     FileSystem local = FileSystem.getLocal(conf);

Reply via email to