This is an automated email from the ASF dual-hosted git repository.

wyk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git


The following commit(s) were added to refs/heads/master by this push:
     new b40b25852f [NO ISSUE][STO] Clean cloud read loggings
b40b25852f is described below

commit b40b25852f979afcad894df4a57ca803e348a802
Author: Wail Alkowaileet <[email protected]>
AuthorDate: Fri Jun 28 13:01:59 2024 -0700

    [NO ISSUE][STO] Clean cloud read loggings
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    - Reduce the log level if the read failure is
      caused by an interrupt
    - Reduce the log level from ERROR to WARN
    
    Change-Id: Iac4be79140c6b354b2e344dd057abc108d20b78e
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/18406
    Integration-Tests: Jenkins <[email protected]>
    Reviewed-by: Murtadha Hubail <[email protected]>
    Tested-by: Jenkins <[email protected]>
---
 .../cloud/clients/aws/s3/S3BufferedWriter.java     |  1 -
 .../buffercache/read/CloudMegaPageReadContext.java | 24 ++++++++++++++--------
 .../storage/common/buffercache/BufferCache.java    | 11 ++++------
 3 files changed, 19 insertions(+), 17 deletions(-)

diff --git 
a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/aws/s3/S3BufferedWriter.java
 
b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/aws/s3/S3BufferedWriter.java
index 05a9fc183d..940a0a6db6 100644
--- 
a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/aws/s3/S3BufferedWriter.java
+++ 
b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/aws/s3/S3BufferedWriter.java
@@ -44,7 +44,6 @@ import 
software.amazon.awssdk.services.s3.model.UploadPartRequest;
 
 public class S3BufferedWriter implements ICloudBufferedWriter {
     private static final String PUT_UPLOAD_ID = "putUploadId";
-    private static final int MAX_RETRIES = 3;
 
     private static final Logger LOGGER = LogManager.getLogger();
     private final S3Client s3Client;
diff --git 
a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree-column/src/main/java/org/apache/hyracks/storage/am/lsm/btree/column/cloud/buffercache/read/CloudMegaPageReadContext.java
 
b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree-column/src/main/java/org/apache/hyracks/storage/am/lsm/btree/column/cloud/buffercache/read/CloudMegaPageReadContext.java
index 7ad0029e25..1c42ec0285 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree-column/src/main/java/org/apache/hyracks/storage/am/lsm/btree/column/cloud/buffercache/read/CloudMegaPageReadContext.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree-column/src/main/java/org/apache/hyracks/storage/am/lsm/btree/column/cloud/buffercache/read/CloudMegaPageReadContext.java
@@ -21,13 +21,13 @@ package 
org.apache.hyracks.storage.am.lsm.btree.column.cloud.buffercache.read;
 import static 
org.apache.hyracks.storage.am.lsm.btree.column.api.projection.ColumnProjectorType.MERGE;
 import static 
org.apache.hyracks.storage.common.buffercache.context.read.DefaultBufferCacheReadContextProvider.DEFAULT;
 
-import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.util.ExceptionUtils;
 import 
org.apache.hyracks.cloud.buffercache.context.BufferCacheCloudReadContextUtil;
 import org.apache.hyracks.cloud.buffercache.page.CloudCachedPage;
 import org.apache.hyracks.cloud.io.ICloudIOManager;
@@ -44,6 +44,7 @@ import org.apache.hyracks.storage.common.disk.IPhysicalDrive;
 import org.apache.hyracks.storage.common.file.BufferedFileHandle;
 import org.apache.hyracks.util.IThreadStats;
 import org.apache.hyracks.util.annotations.NotThreadSafe;
+import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
@@ -215,7 +216,7 @@ public final class CloudMegaPageReadContext implements 
IBufferCacheReadContext {
             stream.read(buffer);
             threadStats.cloudPageRead();
         } catch (Throwable th) {
-            LOGGER.warn("Failed to READ {} bytes from stream {}", length, 
gapStream);
+            LOGGER.log(getLevel(th), "Failed to READ {} bytes from stream {}", 
length, gapStream, th);
             throw HyracksDataException.create(th);
         }
     }
@@ -229,9 +230,9 @@ public final class CloudMegaPageReadContext implements 
IBufferCacheReadContext {
         long newOffset = cPage.getCompressedPageOffset() + 
cPage.getCompressedPageSize();
         try {
             gapStream.skipTo(newOffset);
-        } catch (IOException e) {
-            LOGGER.warn("Failed to SKIP to new offset {} from stream {}", 
newOffset, gapStream);
-            throw HyracksDataException.create(e);
+        } catch (Throwable th) {
+            LOGGER.log(getLevel(th), "Failed to SKIP to new offset {} from 
stream {}", newOffset, gapStream, th);
+            throw HyracksDataException.create(th);
         }
     }
 
@@ -245,11 +246,16 @@ public final class CloudMegaPageReadContext implements 
IBufferCacheReadContext {
                     pinnedPages.add(bufferCache.pin(dpid, this));
                 }
                 pageCounter++;
-            } catch (Throwable e) {
-                LOGGER.error("Error while pinning page number {} with number 
of pages {}. "
-                        + "stream: {}, columnRanges:\n {}", i, numberOfPages, 
gapStream, columnRanges);
-                throw e;
+            } catch (Throwable th) {
+                LOGGER.log(getLevel(th),
+                        "Error while pinning page number {} with number of 
pages {}. stream: {}, columnRanges:\n {}", i,
+                        numberOfPages, gapStream, columnRanges, th);
+                throw th;
             }
         }
     }
+
+    private static Level getLevel(Throwable th) {
+        return ExceptionUtils.causedByInterrupt(th) ? Level.DEBUG : Level.WARN;
+    }
 }
diff --git 
a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
 
b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
index c37bf98ebc..15a019c595 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
@@ -214,10 +214,6 @@ public class BufferCache implements IBufferCacheInternal, 
ILifeCycleComponent, I
                     try {
                         tryRead(cPage, context);
                         cPage.valid = true;
-                    } catch (Exception e) {
-                        LOGGER.log(ExceptionUtils.causedByInterrupt(e) ? 
Level.DEBUG : Level.WARN,
-                                "Failure while trying to read a page from 
disk", e);
-                        throw e;
                     } finally {
                         if (!cPage.valid) {
                             unpin(cPage, context);
@@ -573,9 +569,10 @@ public class BufferCache implements IBufferCacheInternal, 
ILifeCycleComponent, I
                 statsSubscribers.getOrDefault(Thread.currentThread(), 
NoOpThreadStats.INSTANCE);
         try {
             fInfo.read(cPage, context, threadStats);
-        } catch (Throwable e) {
-            LOGGER.error("Error while reading a page {} in file {}", cPage, 
fInfo);
-            throw e;
+        } catch (Throwable th) {
+            LOGGER.log(ExceptionUtils.causedByInterrupt(th) ? Level.DEBUG : 
Level.WARN,
+                    "Error while reading a page {} in file {}", cPage, fInfo, 
th);
+            throw th;
         }
 
         if (context.incrementStats()) {

Reply via email to