Repository: hive
Updated Branches:
  refs/heads/master 17441e485 -> a4198f584


HIVE-18828 : improve error handling for codecs in LLAP IO (Sergey Shelukhin, reviewed by Gopal Vijayaraghavan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e3d85c4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e3d85c4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e3d85c4

Branch: refs/heads/master
Commit: 5e3d85c409b14afb6eb94bad01348d013a536503
Parents: 17441e4
Author: sergey <ser...@apache.org>
Authored: Thu Mar 1 15:50:04 2018 -0800
Committer: sergey <ser...@apache.org>
Committed: Fri Mar 2 11:24:34 2018 -0800

----------------------------------------------------------------------
 .../llap/io/encoded/OrcEncodedDataReader.java   | 19 +++++++++-----
 .../ql/io/orc/encoded/EncodedReaderImpl.java    | 26 +++++++++++++++-----
 2 files changed, 33 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/5e3d85c4/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index a6d2a04..9219d28 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -752,14 +752,21 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
     CompressionKind kind = orcReader.getCompressionKind();
     boolean isPool = useCodecPool;
     CompressionCodec codec = isPool ? OrcCodecPool.getCodec(kind) : WriterImpl.createCodec(kind);
+    boolean isCodecError = true;
     try {
-      return buildStripeFooter(Lists.<DiskRange>newArrayList(new BufferChunk(bb, 0)),
-          bb.remaining(), codec, orcReader.getCompressionSize());
+      OrcProto.StripeFooter result = buildStripeFooter(Lists.<DiskRange>newArrayList(
+          new BufferChunk(bb, 0)), bb.remaining(), codec, orcReader.getCompressionSize());
+      isCodecError = false;
+      return result;
     } finally {
-      if (isPool) {
-        OrcCodecPool.returnCodec(kind, codec);
-      } else {
-        codec.close();
+      try {
+        if (isPool && !isCodecError) {
+          OrcCodecPool.returnCodec(kind, codec);
+        } else {
+          codec.close();
+        }
+      } catch (Exception ex) {
+        LOG.error("Ignoring codec cleanup error", ex);
       }
     }
   }
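
The hunk above boils down to one rule: the codec is presumed broken until buildStripeFooter returns, a broken codec is closed instead of being handed back to OrcCodecPool, and any cleanup failure is logged rather than allowed to mask the original exception. A rough standalone sketch of that rule follows; Codec and the ad-hoc pool below are hypothetical stand-ins, not the Hive/ORC classes:

import java.util.ArrayDeque;
import java.util.Deque;

/** Sketch of "return to the pool only on success"; not the Hive/ORC API. */
public class PooledCodecSketch {

  /** Hypothetical codec whose internal state may be corrupted by a failed call. */
  interface Codec extends AutoCloseable {
    byte[] decompress(byte[] input) throws Exception;
    @Override void close();
  }

  private final Deque<Codec> pool = new ArrayDeque<>();

  private Codec takeCodec() {
    Codec cached = pool.poll();
    // Identity "codec", present only so the sketch compiles and runs.
    return cached != null ? cached : new Codec() {
      public byte[] decompress(byte[] input) { return input.clone(); }
      public void close() { /* release native resources here */ }
    };
  }

  public byte[] decompress(byte[] input) throws Exception {
    Codec codec = takeCodec();
    boolean failed = true;            // pessimistic: assume failure until the call succeeds
    try {
      byte[] out = codec.decompress(input);
      failed = false;                 // success: this codec instance is safe to reuse
      return out;
    } finally {
      try {
        if (!failed) {
          pool.push(codec);           // healthy codec goes back into the pool
        } else {
          codec.close();              // possibly corrupted: destroy rather than recycle
        }
      } catch (Exception cleanup) {
        // Logged and swallowed so cleanup never masks the original exception.
        System.err.println("Ignoring codec cleanup error: " + cleanup);
      }
    }
  }
}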

http://git-wip-us.apache.org/repos/asf/hive/blob/5e3d85c4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
index 32bdf6e..893a2bb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
@@ -130,6 +130,7 @@ class EncodedReaderImpl implements EncodedReader {
   private boolean isDataReaderOpen = false;
   private final CompressionCodec codec;
   private final boolean isCodecFromPool;
+  private boolean isCodecFailure = false;
   private final boolean isCompressed;
   private final org.apache.orc.CompressionKind compressionKind;
   private final int bufferSize;
@@ -677,12 +678,17 @@ class EncodedReaderImpl implements EncodedReader {
 
   @Override
   public void close() throws IOException {
-    if (isCodecFromPool) {
-      OrcCodecPool.returnCodec(compressionKind, codec);
-    } else {
-      codec.close();
+    try {
+      if (isCodecFromPool && !isCodecFailure) {
+        OrcCodecPool.returnCodec(compressionKind, codec);
+      } else {
+        codec.close();
+      }
+    } catch (Exception ex) {
+      LOG.error("Ignoring error from codec", ex);
+    } finally {
+      dataReader.close();
     }
-    dataReader.close();
   }
 
   /**
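
The essence of the close() change above: codec cleanup may fail, but that failure is logged and swallowed, and the underlying DataReader is closed unconditionally in a finally block. A minimal illustration with plain java.io.Closeable stand-ins (TwoStageCloser and both fields are made up for this sketch, not the ORC types):

import java.io.Closeable;
import java.io.IOException;

// Sketch only: Closeable stand-ins, not the real CompressionCodec / DataReader types.
class TwoStageCloser implements Closeable {
  private final Closeable codec;       // cleanup errors here are logged and swallowed
  private final Closeable dataReader;  // must be closed no matter what the codec does

  TwoStageCloser(Closeable codec, Closeable dataReader) {
    this.codec = codec;
    this.dataReader = dataReader;
  }

  @Override
  public void close() throws IOException {
    try {
      codec.close();                   // the real reader may return a pooled codec here instead
    } catch (Exception ex) {
      // Swallowed on purpose: codec cleanup must never prevent closing the data reader.
      System.err.println("Ignoring error from codec: " + ex);
    } finally {
      dataReader.close();              // always runs; its IOException still propagates
    }
  }
}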
@@ -870,7 +876,15 @@ class EncodedReaderImpl implements EncodedReader {
     for (ProcCacheChunk chunk : toDecompress) {
       ByteBuffer dest = chunk.getBuffer().getByteBufferRaw();
       if (chunk.isOriginalDataCompressed) {
-        decompressChunk(chunk.originalData, codec, dest);
+        boolean isOk = false;
+        try {
+          decompressChunk(chunk.originalData, codec, dest);
+          isOk = true;
+        } finally {
+          if (!isOk) {
+            isCodecFailure = true;
+          }
+        }
       } else {
         copyUncompressedChunk(chunk.originalData, dest);
       }
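
The isOk/isCodecFailure pattern here records a decompression failure without catching the exception: if decompressChunk throws, isOk never flips to true, the finally block sets isCodecFailure, and the close() hunk earlier in this file then closes the codec instead of returning it to OrcCodecPool. The same idiom in a self-contained form, with java.util.zip.Inflater standing in for the ORC codec:

import java.util.zip.DataFormatException;
import java.util.zip.Inflater;

/** Illustration of the flag-on-failure idiom only; Inflater stands in for the ORC codec. */
class FlagOnFailureSketch {
  private final Inflater inflater = new Inflater();
  private boolean codecFailure = false;   // mirrors isCodecFailure above

  int decompressOne(byte[] src, byte[] dest) throws DataFormatException {
    boolean ok = false;
    try {
      inflater.setInput(src);
      int n = inflater.inflate(dest);     // throws DataFormatException on corrupt input
      ok = true;                          // reached only if decompression fully succeeded
      return n;
    } finally {
      if (!ok) {
        codecFailure = true;              // exception is not caught; it still propagates
      }
    }
  }

  void close() {
    // A codec that has seen a failure is ended outright; a healthy one could be pooled instead.
    inflater.end();
  }
}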
