This is an automated email from the ASF dual-hosted git repository.

huaxingao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2754d75e1b3 [SPARK-39116][SQL] Replace double negation in `exists` 
with `forall`
2754d75e1b3 is described below

commit 2754d75e1b33ae191616077bd23801edbd7c7c49
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Fri May 6 11:30:07 2022 -0700

    [SPARK-39116][SQL] Replace double negation in `exists` with `forall`
    
    ### What changes were proposed in this pull request?
    This is a minor code simplification:
    **Before**
    
    ```scala
    !Seq(1, 2).exists(x => !condition(x))
    ```
    
    **After**
    
    ```scala
    Seq(1, 2).forall(x => condition(x))
    ```
    
    ### Why are the changes needed?
    Code simplification
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GA
    
    Closes #36470 from LuciferYang/SPARK-39116.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: huaxingao <huaxin_...@apple.com>
---
 .../main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala  | 2 +-
 .../spark/sql/catalyst/plans/logical/basicLogicalOperators.scala      | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 906077a9c0e..817a62fd1d8 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -3159,7 +3159,7 @@ class Analyzer(override val catalogManager: 
CatalogManager)
       // We only extract Window Expressions after all expressions of the 
Project
       // have been resolved.
       case p @ Project(projectList, child)
-        if hasWindowFunction(projectList) && 
!p.expressions.exists(!_.resolved) =>
+        if hasWindowFunction(projectList) && p.expressions.forall(_.resolved) 
=>
         val (windowExpressions, regularExpressions) = 
extract(projectList.toIndexedSeq)
         // We add a project to get all needed expressions for window 
expressions from the child
         // of the original Project operator.
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
index 419e28c8007..e38fa627346 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
@@ -81,7 +81,7 @@ case class Project(projectList: Seq[NamedExpression], child: 
LogicalPlan)
       }.nonEmpty
     )
 
-    !expressions.exists(!_.resolved) && childrenResolved && 
!hasSpecialExpressions
+    expressions.forall(_.resolved) && childrenResolved && 
!hasSpecialExpressions
   }
 
   override lazy val validConstraints: ExpressionSet =
@@ -985,7 +985,7 @@ case class Aggregate(
       }.nonEmpty
     )
 
-    !expressions.exists(!_.resolved) && childrenResolved && 
!hasWindowExpressions
+    expressions.forall(_.resolved) && childrenResolved && !hasWindowExpressions
   }
 
   override def output: Seq[Attribute] = aggregateExpressions.map(_.toAttribute)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to