This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 46c7ff8ddf49 [SPARK-45357][CONNECT][TESTS] Normalize `dataframeId` 
when comparing `CollectMetrics` in `SparkConnectProtoSuite`
46c7ff8ddf49 is described below

commit 46c7ff8ddf49156b0d79b5c8c6a0103d3fd0487c
Author: yangjie01 <[email protected]>
AuthorDate: Fri Oct 6 14:09:51 2023 +0800

    [SPARK-45357][CONNECT][TESTS] Normalize `dataframeId` when comparing 
`CollectMetrics` in `SparkConnectProtoSuite`
    
    ### What changes were proposed in this pull request?
    This PR adds a new function `normalizeDataframeId` that sets the `dataframeId` 
of `CollectMetrics` to the constant 0 before comparing `LogicalPlan`s in the 
test cases of `SparkConnectProtoSuite`.
    
    ### Why are the changes needed?
    The test scenarios in `SparkConnectProtoSuite` do not need to compare the 
`dataframeId` in `CollectMetrics`.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    - Manually check
    
    run
    
    ```
    build/mvn clean install -pl connector/connect/server -am -DskipTests
    build/mvn test -pl connector/connect/server
    ```
    
    **Before**
    
    ```
    - Test observe *** FAILED ***
      == FAIL: Plans do not match ===
      !CollectMetrics my_metric, [min(id#0) AS min_val#0, max(id#0) AS 
max_val#0, sum(id#0) AS sum(id)#0L], 0   CollectMetrics my_metric, [min(id#0) 
AS min_val#0, max(id#0) AS max_val#0, sum(id#0) AS sum(id)#0L], 53
       +- LocalRelation <empty>, [id#0, name#0]                                 
                                +- LocalRelation <empty>, [id#0, name#0] 
(PlanTest.scala:179)
    ```
    
    **After**
    
    ```
    Run completed in 41 seconds, 631 milliseconds.
    Total number of tests run: 882
    Suites: completed 24, aborted 0
    Tests: succeeded 882, failed 0, canceled 0, ignored 0, pending 0
    All tests passed.
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #43155 from LuciferYang/SPARK-45357.
    
    Authored-by: yangjie01 <[email protected]>
    Signed-off-by: Ruifeng Zheng <[email protected]>
---
 .../apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala  | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git 
a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
 
b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
index 21c3e8f3740e..c54aa496c667 100644
--- 
a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
+++ 
b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.{AnalysisException, Column, 
DataFrame, Observation,
 import org.apache.spark.sql.catalyst.analysis
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, 
GenericInternalRow, UnsafeProjection}
 import org.apache.spark.sql.catalyst.plans.{FullOuter, Inner, LeftAnti, 
LeftOuter, LeftSemi, PlanTest, RightOuter}
-import org.apache.spark.sql.catalyst.plans.logical.{Distinct, LocalRelation, 
LogicalPlan}
+import org.apache.spark.sql.catalyst.plans.logical.{CollectMetrics, Distinct, 
LocalRelation, LogicalPlan}
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
 import org.apache.spark.sql.connect.common.InvalidPlanInput
 import 
org.apache.spark.sql.connect.common.LiteralValueProtoConverter.toLiteralProto
@@ -1067,7 +1067,10 @@ class SparkConnectProtoSuite extends PlanTest with 
SparkConnectPlanTest {
 
   // Compares proto plan with LogicalPlan.
   private def comparePlans(connectPlan: proto.Relation, sparkPlan: 
LogicalPlan): Unit = {
+    def normalizeDataframeId(plan: LogicalPlan): LogicalPlan = plan transform {
+      case cm: CollectMetrics => cm.copy(dataframeId = 0)
+    }
     val connectAnalyzed = analyzePlan(transform(connectPlan))
-    comparePlans(connectAnalyzed, sparkPlan, false)
+    comparePlans(normalizeDataframeId(connectAnalyzed), 
normalizeDataframeId(sparkPlan), false)
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to