This is an automated email from the ASF dual-hosted git repository.

prasanthj pushed a commit to branch revert-322-ORC-419
in repository https://gitbox.apache.org/repos/asf/orc.git
commit ee41fff928f66c7416f45f05d59658514a0086cb
Author: Prasanth Jayachandran <j.prasant...@gmail.com>
AuthorDate: Wed Oct 17 15:11:35 2018 -0700

    Revert "ORC-419: Ensure to call `close` at RecordReaderImpl constructor exception"
---
 .../java/org/apache/orc/impl/RecordReaderImpl.java |  9 +-----
 .../org/apache/orc/impl/TestRecordReaderImpl.java  | 35 ----------------------
 2 files changed, 1 insertion(+), 43 deletions(-)

diff --git a/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java b/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java
index 2b8a6bf..731b46e 100644
--- a/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java
+++ b/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java
@@ -269,14 +269,7 @@ public class RecordReaderImpl implements RecordReader {
     indexes = new OrcProto.RowIndex[types.size()];
     bloomFilterIndices = new OrcProto.BloomFilterIndex[types.size()];
     bloomFilterKind = new OrcProto.Stream.Kind[types.size()];
-
-    try {
-      advanceToNextRow(reader, 0L, true);
-    } catch (IOException e) {
-      // Try to close since this happens in constructor.
-      close();
-      throw e;
-    }
+    advanceToNextRow(reader, 0L, true);
   }
 
   public static final class PositionProviderImpl implements PositionProvider {
diff --git a/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java b/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
index 529a08b..66951ff 100644
--- a/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
@@ -24,12 +24,10 @@ import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.atLeastOnce;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
@@ -56,8 +54,6 @@ import org.apache.hadoop.fs.PositionedReadable;
 import org.apache.hadoop.fs.Seekable;
 import org.apache.hadoop.hive.common.io.DiskRangeList;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentImpl;
@@ -2123,35 +2119,4 @@ public class TestRecordReaderImpl {
     RecordReader recordReader = reader.rows(readerOptions);
     recordReader.close();
   }
-
-  @Test
-  public void testCloseAtConstructorException() throws Exception {
-    Configuration conf = new Configuration();
-    Path path = new Path(workDir, "oneRow.orc");
-    FileSystem.get(conf).delete(path, true);
-
-    TypeDescription schema = TypeDescription.createLong();
-    OrcFile.WriterOptions options = OrcFile.writerOptions(conf).setSchema(schema);
-    Writer writer = OrcFile.createWriter(path, options);
-    VectorizedRowBatch writeBatch = schema.createRowBatch();
-    int row = writeBatch.size++;
-    ((LongColumnVector) writeBatch.cols[0]).vector[row] = 0;
-    writer.addRowBatch(writeBatch);
-    writer.close();
-
-    DataReader mockedDataReader = mock(DataReader.class);
-    when(mockedDataReader.clone()).thenReturn(mockedDataReader);
-    doThrow(new IOException()).when(mockedDataReader).readStripeFooter(any());
-
-    Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
-    Reader.Options readerOptions = reader.options().dataReader(mockedDataReader);
-    boolean isCalled = false;
-    try {
-      reader.rows(readerOptions);
-    } catch (IOException ie) {
-      isCalled = true;
-    }
-    assertTrue(isCalled);
-    verify(mockedDataReader, times(1)).close();
-  }
 }
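For context on what the revert removes: the dropped try/catch implemented the usual "close on constructor failure" idiom. When a constructor fails partway through, the caller never receives the object and so can never close it, which means the constructor itself has to release any resources it already acquired before rethrowing. A minimal standalone sketch of that idiom follows; it is illustrative only, and the names CloseOnFailureReader and positionToFirstRow are made up for the example, not part of the ORC codebase.

import java.io.Closeable;
import java.io.IOException;

// Illustrative sketch, not ORC code: shows the cleanup-on-constructor-failure
// idiom that the reverted patch applied to RecordReaderImpl.
public class CloseOnFailureReader implements Closeable {

  private final Closeable dataSource;   // stands in for the ORC DataReader

  public CloseOnFailureReader(Closeable dataSource) throws IOException {
    this.dataSource = dataSource;
    try {
      positionToFirstRow();             // may throw, like advanceToNextRow(...)
    } catch (IOException e) {
      // The caller never gets a reference to this half-built object, so it
      // must release its own resources before propagating the failure.
      close();
      throw e;
    }
  }

  private void positionToFirstRow() throws IOException {
    // placeholder for the real positioning I/O
  }

  @Override
  public void close() throws IOException {
    dataSource.close();
  }
}

Along with that guard, the revert also drops the regression test that asserted DataReader.close() is called when reader.rows(readerOptions) throws from the constructor.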