This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new c8c97b7b8ab [SPARK-46031][SQL] Replace `!Optional.isPresent()` with 
`Optional.isEmpty()`
c8c97b7b8ab is described below

commit c8c97b7b8ab9fde7024547b9cefa34c2e817c5f6
Author: yangjie01 <[email protected]>
AuthorDate: Tue Nov 21 10:20:18 2023 -0800

    [SPARK-46031][SQL] Replace `!Optional.isPresent()` with `Optional.isEmpty()`
    
    ### What changes were proposed in this pull request?
    This PR uses `Optional.isEmpty()` instead of `!Optional.isPresent()` in Java 
code:
    
    `isPresent` is:
    
    ```java
    public boolean isPresent() {
      return value != null;
    }
    ```
    
    and `isEmpty` is:
    
    ```java
    public boolean isEmpty() {
      return value == null;
    }
    ```
    
    so `!isPresent()` and `isEmpty()` are equivalent, but `isEmpty()` reads more 
concisely
    
    ### Why are the changes needed?
    Semantic simplification
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #43931 from LuciferYang/isPresent-2-isEmpty.
    
    Authored-by: yangjie01 <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../sql/execution/datasources/orc/OrcColumnarBatchReader.java     | 2 +-
 .../sql/execution/datasources/parquet/ParquetColumnVector.java    | 2 +-
 .../spark/sql/execution/vectorized/WritableColumnVector.java      | 8 ++++----
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnarBatchReader.java
 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnarBatchReader.java
index b6184baa2e0..c1afad2aca4 100644
--- 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnarBatchReader.java
+++ 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnarBatchReader.java
@@ -183,7 +183,7 @@ public class OrcColumnarBatchReader extends 
RecordReader<Void, ColumnarBatch> {
           Object defaultValue = 
ResolveDefaultColumns.existenceDefaultValues(requiredSchema)[i];
           if (defaultValue == null) {
             missingCol.putNulls(0, capacity);
-          } else if (!missingCol.appendObjects(capacity, 
defaultValue).isPresent()) {
+          } else if (missingCol.appendObjects(capacity, 
defaultValue).isEmpty()) {
             throw new IllegalArgumentException("Cannot assign default column 
value to result " +
               "column batch in vectorized Orc reader because the data type is 
not supported: " +
               defaultValue);
diff --git 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetColumnVector.java
 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetColumnVector.java
index 5198096fe01..7f5b69a09e9 100644
--- 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetColumnVector.java
+++ 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetColumnVector.java
@@ -92,7 +92,7 @@ final class ParquetColumnVector {
       // the appendObjects method. This delegates to some specific append* 
method depending on the
       // type of 'defaultValue'; for example, if 'defaultValue' is a Float, 
then we call the
       // appendFloats method.
-      if (!vector.appendObjects(capacity, defaultValue).isPresent()) {
+      if (vector.appendObjects(capacity, defaultValue).isEmpty()) {
         throw new IllegalArgumentException("Cannot assign default column value 
to result " +
           "column batch in vectorized Parquet reader because the data type is 
not supported: " +
           defaultValue);
diff --git 
a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java
 
b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java
index 10907c69c22..856132fe14f 100644
--- 
a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java
+++ 
b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java
@@ -757,7 +757,7 @@ public abstract class WritableColumnVector extends 
ColumnVector {
       for (int i = 0; i < length; ++i) {
         appendArray(arrayData.numElements());
         for (Object element : arrayData.array()) {
-          if (!arrayData().appendObjects(1, element).isPresent()) {
+          if (arrayData().appendObjects(1, element).isEmpty()) {
             return Optional.empty();
           }
         }
@@ -771,7 +771,7 @@ public abstract class WritableColumnVector extends 
ColumnVector {
         appendStruct(false);
         for (int j = 0; j < row.values().length; ++j) {
           Object element = row.values()[j];
-          if (!childColumns[j].appendObjects(1, element).isPresent()) {
+          if (childColumns[j].appendObjects(1, element).isEmpty()) {
             return Optional.empty();
           }
         }
@@ -784,12 +784,12 @@ public abstract class WritableColumnVector extends 
ColumnVector {
       int result = 0;
       for (int i = 0; i < length; ++i) {
         for (Object key : data.keyArray().array()) {
-          if (!childColumns[0].appendObjects(1, key).isPresent()) {
+          if (childColumns[0].appendObjects(1, key).isEmpty()) {
             return Optional.empty();
           }
         }
         for (Object val: data.valueArray().array()) {
-          if (!childColumns[1].appendObjects(1, val).isPresent()) {
+          if (childColumns[1].appendObjects(1, val).isEmpty()) {
             return Optional.empty();
           }
         }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to