This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b5408e1ce61c [SPARK-45828][SQL] Remove deprecated method in dsl
b5408e1ce61c is described below

commit b5408e1ce61ce2195de72dcf79d8355c16b4b92a
Author: panbingkun <[email protected]>
AuthorDate: Wed Nov 8 08:24:52 2023 -0800

    [SPARK-45828][SQL] Remove deprecated method in dsl
    
    ### What changes were proposed in this pull request?
    The PR aims to remove some deprecated methods in `dsl`.
    
    ### Why are the changes needed?
    After https://github.com/apache/spark/pull/36646 (Apache Spark 3.4.0), the
methods `def as(alias: Symbol): NamedExpression = Alias(expr, alias.name)()` and
`def subquery(alias: Symbol): LogicalPlan = SubqueryAlias(alias.name,
logicalPlan)` have been marked as `deprecated`, and we need to remove them in
`Spark 4.0`.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass GA.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #43708 from panbingkun/SPARK-45828.
    
    Authored-by: panbingkun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../main/scala/org/apache/spark/sql/catalyst/dsl/package.scala    | 6 ------
 .../spark/sql/catalyst/optimizer/TransposeWindowSuite.scala       | 8 ++++----
 .../org/apache/spark/sql/catalyst/plans/LogicalPlanSuite.scala    | 6 +++---
 3 files changed, 7 insertions(+), 13 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index 5f85716fa283..30d4c2dbb409 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -152,9 +152,6 @@ package object dsl {
     def desc: SortOrder = SortOrder(expr, Descending)
     def desc_nullsFirst: SortOrder = SortOrder(expr, Descending, NullsFirst, 
Seq.empty)
     def as(alias: String): NamedExpression = Alias(expr, alias)()
-    // TODO: Remove at Spark 4.0.0
-    @deprecated("Use as(alias: String)", "3.4.0")
-    def as(alias: Symbol): NamedExpression = Alias(expr, alias.name)()
   }
 
   trait ExpressionConversions {
@@ -468,9 +465,6 @@ package object dsl {
           limit: Int): LogicalPlan =
         WindowGroupLimit(partitionSpec, orderSpec, rankLikeFunction, limit, 
logicalPlan)
 
-      // TODO: Remove at Spark 4.0.0
-      @deprecated("Use subquery(alias: String)", "3.4.0")
-      def subquery(alias: Symbol): LogicalPlan = SubqueryAlias(alias.name, 
logicalPlan)
       def subquery(alias: String): LogicalPlan = SubqueryAlias(alias, 
logicalPlan)
       def as(alias: String): LogicalPlan = SubqueryAlias(alias, logicalPlan)
 
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/TransposeWindowSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/TransposeWindowSuite.scala
index 8d4c2de10e34..f4d520bbb443 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/TransposeWindowSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/TransposeWindowSuite.scala
@@ -146,15 +146,15 @@ class TransposeWindowSuite extends PlanTest {
   test("SPARK-38034: transpose two adjacent windows with compatible partitions 
" +
     "which is not a prefix") {
     val query = testRelation
-      .window(Seq(sum(c).as(Symbol("sum_a_2"))), partitionSpec4, orderSpec2)
-      .window(Seq(sum(c).as(Symbol("sum_a_1"))), partitionSpec3, orderSpec1)
+      .window(Seq(sum(c).as("sum_a_2")), partitionSpec4, orderSpec2)
+      .window(Seq(sum(c).as("sum_a_1")), partitionSpec3, orderSpec1)
 
     val analyzed = query.analyze
     val optimized = Optimize.execute(analyzed)
 
     val correctAnswer = testRelation
-      .window(Seq(sum(c).as(Symbol("sum_a_1"))), partitionSpec3, orderSpec1)
-      .window(Seq(sum(c).as(Symbol("sum_a_2"))), partitionSpec4, orderSpec2)
+      .window(Seq(sum(c).as("sum_a_1")), partitionSpec3, orderSpec1)
+      .window(Seq(sum(c).as("sum_a_2")), partitionSpec4, orderSpec2)
       .select(Symbol("a"), Symbol("b"), Symbol("c"), Symbol("d"),
         Symbol("sum_a_2"), Symbol("sum_a_1"))
 
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/LogicalPlanSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/LogicalPlanSuite.scala
index ea0fcac881c7..3eba9eebc3d5 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/LogicalPlanSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/LogicalPlanSuite.scala
@@ -126,12 +126,12 @@ class LogicalPlanSuite extends SparkFunSuite {
     assert(sort2.maxRows === Some(100))
     assert(sort2.maxRowsPerPartition === Some(100))
 
-    val c1 = 
Literal(1).as(Symbol("a")).toAttribute.newInstance().withNullability(true)
-    val c2 = 
Literal(2).as(Symbol("b")).toAttribute.newInstance().withNullability(true)
+    val c1 = Literal(1).as("a").toAttribute.newInstance().withNullability(true)
+    val c2 = Literal(2).as("b").toAttribute.newInstance().withNullability(true)
     val expand = Expand(
       Seq(Seq(Literal(null), Symbol("b")), Seq(Symbol("a"), Literal(null))),
       Seq(c1, c2),
-      sort.select(Symbol("id") as Symbol("a"), Symbol("id") + 1 as 
Symbol("b")))
+      sort.select(Symbol("id") as "a", Symbol("id") + 1 as "b"))
     assert(expand.maxRows === Some(200))
     assert(expand.maxRowsPerPartition === Some(68))
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to