This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 88b983d9f2a [SPARK-39503][SQL][FOLLOWUP] Fix ansi golden files and typo
88b983d9f2a is described below

commit 88b983d9f2a7190b8d74a6176740afb65fa08223
Author: ulysses-you <[email protected]>
AuthorDate: Thu Jul 7 13:17:11 2022 +0900

    [SPARK-39503][SQL][FOLLOWUP] Fix ansi golden files and typo
    
    ### What changes were proposed in this pull request?
    
    - re-generate the ANSI golden files
    - fix a parameter name typo in a FunctionIdentifier auxiliary constructor

    ### Why are the changes needed?
    
    Keep the ANSI golden files up to date and fix the misleading parameter name.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No; the change being fixed has not been released yet.
    
    ### How was this patch tested?
    
    Passes existing CI.
    
    Closes #37111 from ulysses-you/catalog-followup.
    
    Authored-by: ulysses-you <[email protected]>
    Signed-off-by: Hyukjin Kwon <[email protected]>
---
 .../apache/spark/sql/catalyst/identifiers.scala    |  2 +-
 .../approved-plans-v1_4/q83.ansi/explain.txt       | 28 +++++++++++-----------
 .../approved-plans-v1_4/q83.ansi/simplified.txt    | 14 +++++------
 .../approved-plans-v1_4/q83.sf100.ansi/explain.txt | 28 +++++++++++-----------
 .../q83.sf100.ansi/simplified.txt                  | 14 +++++------
 5 files changed, 43 insertions(+), 43 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
index 9cae2b622a7..2de44d6f349 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
@@ -142,7 +142,7 @@ case class FunctionIdentifier(funcName: String, database: Option[String], catalo
   override val identifier: String = funcName
 
   def this(funcName: String) = this(funcName, None, None)
-  def this(table: String, database: Option[String]) = this(table, database, None)
+  def this(funcName: String, database: Option[String]) = this(funcName, database, None)
 
   override def toString: String = unquotedString
 }
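
For reference, a minimal sketch of how the corrected two-argument constructor
is exercised. The function name and database below are hypothetical, chosen
only to illustrate the call; toString delegates to unquotedString, as the hunk
above shows.

    import org.apache.spark.sql.catalyst.FunctionIdentifier

    // Auxiliary constructors require `new`; the second argument is the
    // optional database, and the catalog argument of the primary
    // constructor defaults to None here.
    val fn = new FunctionIdentifier("my_udf", Some("default"))

    // toString returns unquotedString, the database-qualified function name.
    println(fn)  // default.my_udf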
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/explain.txt
index d281e59c727..905d29293a3 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/explain.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/explain.txt
@@ -13,11 +13,11 @@ TakeOrderedAndProject (46)
       :     :              :  +- * BroadcastHashJoin Inner BuildRight (8)
       :     :              :     :- * Filter (3)
       :     :              :     :  +- * ColumnarToRow (2)
-      :     :              :     :     +- Scan parquet default.store_returns (1)
+      :     :              :     :     +- Scan parquet spark_catalog.default.store_returns (1)
       :     :              :     +- BroadcastExchange (7)
       :     :              :        +- * Filter (6)
       :     :              :           +- * ColumnarToRow (5)
-      :     :              :              +- Scan parquet default.item (4)
+      :     :              :              +- Scan parquet spark_catalog.default.item (4)
       :     :              +- ReusedExchange (10)
       :     +- BroadcastExchange (28)
       :        +- * HashAggregate (27)
@@ -29,7 +29,7 @@ TakeOrderedAndProject (46)
       :                       :  +- * BroadcastHashJoin Inner BuildRight (20)
       :                       :     :- * Filter (18)
       :                       :     :  +- * ColumnarToRow (17)
-      :                       :     :     +- Scan parquet default.catalog_returns (16)
+      :                       :     :     +- Scan parquet spark_catalog.default.catalog_returns (16)
       :                       :     +- ReusedExchange (19)
       :                       +- ReusedExchange (22)
       +- BroadcastExchange (43)
@@ -42,12 +42,12 @@ TakeOrderedAndProject (46)
                         :  +- * BroadcastHashJoin Inner BuildRight (35)
                         :     :- * Filter (33)
                         :     :  +- * ColumnarToRow (32)
-                        :     :     +- Scan parquet default.web_returns (31)
+                        :     :     +- Scan parquet spark_catalog.default.web_returns (31)
                         :     +- ReusedExchange (34)
                         +- ReusedExchange (37)
 
 
-(1) Scan parquet default.store_returns
+(1) Scan parquet spark_catalog.default.store_returns
 Output [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -62,7 +62,7 @@ Input [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3]
 Input [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3]
 Condition : isnotnull(sr_item_sk#1)
 
-(4) Scan parquet default.item
+(4) Scan parquet spark_catalog.default.item
 Output [2]: [i_item_sk#5, i_item_id#6]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -119,7 +119,7 @@ Functions [1]: [sum(sr_return_quantity#2)]
 Aggregate Attributes [1]: [sum(sr_return_quantity#2)#10]
 Results [2]: [i_item_id#6 AS item_id#11, sum(sr_return_quantity#2)#10 AS sr_item_qty#12]
 
-(16) Scan parquet default.catalog_returns
+(16) Scan parquet spark_catalog.default.catalog_returns
 Output [3]: [cr_item_sk#13, cr_return_quantity#14, cr_returned_date_sk#15]
 Batched: true
 Location: InMemoryFileIndex []
@@ -189,7 +189,7 @@ Join condition: None
 Output [3]: [item_id#11, sr_item_qty#12, cr_item_qty#23]
 Input [4]: [item_id#11, sr_item_qty#12, item_id#22, cr_item_qty#23]
 
-(31) Scan parquet default.web_returns
+(31) Scan parquet spark_catalog.default.web_returns
 Output [3]: [wr_item_sk#24, wr_return_quantity#25, wr_returned_date_sk#26]
 Batched: true
 Location: InMemoryFileIndex []
@@ -271,20 +271,20 @@ BroadcastExchange (62)
    +- * BroadcastHashJoin LeftSemi BuildRight (60)
       :- * Filter (49)
       :  +- * ColumnarToRow (48)
-      :     +- Scan parquet default.date_dim (47)
+      :     +- Scan parquet spark_catalog.default.date_dim (47)
       +- BroadcastExchange (59)
          +- * Project (58)
             +- * BroadcastHashJoin LeftSemi BuildRight (57)
                :- * ColumnarToRow (51)
-               :  +- Scan parquet default.date_dim (50)
+               :  +- Scan parquet spark_catalog.default.date_dim (50)
                +- BroadcastExchange (56)
                   +- * Project (55)
                      +- * Filter (54)
                         +- * ColumnarToRow (53)
-                           +- Scan parquet default.date_dim (52)
+                           +- Scan parquet spark_catalog.default.date_dim (52)
 
 
-(47) Scan parquet default.date_dim
+(47) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#7, d_date#39]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -298,7 +298,7 @@ Input [2]: [d_date_sk#7, d_date#39]
 Input [2]: [d_date_sk#7, d_date#39]
 Condition : isnotnull(d_date_sk#7)
 
-(50) Scan parquet default.date_dim
+(50) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date#40, d_week_seq#41]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -307,7 +307,7 @@ ReadSchema: struct<d_date:date,d_week_seq:int>
 (51) ColumnarToRow [codegen id : 2]
 Input [2]: [d_date#40, d_week_seq#41]
 
-(52) Scan parquet default.date_dim
+(52) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date#42, d_week_seq#43]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
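
The golden-file churn above is mechanical: every Scan node is now printed with
the session catalog prefix. A minimal sketch of how this surfaces, assuming
spark is an active SparkSession and the table lives in the default database of
the session catalog (the table name is taken from the plans above):

    // Scan nodes render the fully qualified table name, catalog included.
    spark.sql("EXPLAIN SELECT * FROM store_returns").show(truncate = false)
    // before: +- Scan parquet default.store_returns
    // after:  +- Scan parquet spark_catalog.default.store_returns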
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/simplified.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/simplified.txt
index 29ff19d7450..f2e0a901c58 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/simplified.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.ansi/simplified.txt
@@ -16,7 +16,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                               Filter [sr_item_sk]
                                 ColumnarToRow
                                   InputAdapter
-                                    Scan parquet default.store_returns [sr_item_sk,sr_return_quantity,sr_returned_date_sk]
+                                    Scan parquet spark_catalog.default.store_returns [sr_item_sk,sr_return_quantity,sr_returned_date_sk]
                                       SubqueryBroadcast [d_date_sk] #1
                                         BroadcastExchange #2
                                           WholeStageCodegen (3)
@@ -25,7 +25,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                                 Filter [d_date_sk]
                                                   ColumnarToRow
                                                     InputAdapter
-                                                      Scan parquet default.date_dim [d_date_sk,d_date]
+                                                      Scan parquet spark_catalog.default.date_dim [d_date_sk,d_date]
                                                 InputAdapter
                                                   BroadcastExchange #3
                                                     WholeStageCodegen (2)
@@ -33,7 +33,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                                         BroadcastHashJoin [d_week_seq,d_week_seq]
                                                           ColumnarToRow
                                                             InputAdapter
-                                                              Scan parquet default.date_dim [d_date,d_week_seq]
+                                                              Scan parquet spark_catalog.default.date_dim [d_date,d_week_seq]
                                                           InputAdapter
                                                             BroadcastExchange #4
                                                               WholeStageCodegen (1)
@@ -41,14 +41,14 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                                                   Filter [d_date]
                                                                     ColumnarToRow
                                                                       InputAdapter
-                                                                        Scan parquet default.date_dim [d_date,d_week_seq]
+                                                                        Scan parquet spark_catalog.default.date_dim [d_date,d_week_seq]
                               InputAdapter
                                 BroadcastExchange #5
                                   WholeStageCodegen (1)
                                     Filter [i_item_sk,i_item_id]
                                       ColumnarToRow
                                         InputAdapter
-                                          Scan parquet default.item [i_item_sk,i_item_id]
+                                          Scan parquet spark_catalog.default.item [i_item_sk,i_item_id]
                           InputAdapter
                             ReusedExchange [d_date_sk] #2
             InputAdapter
@@ -66,7 +66,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                     Filter [cr_item_sk]
                                       ColumnarToRow
                                         InputAdapter
-                                          Scan parquet default.catalog_returns [cr_item_sk,cr_return_quantity,cr_returned_date_sk]
+                                          Scan parquet spark_catalog.default.catalog_returns [cr_item_sk,cr_return_quantity,cr_returned_date_sk]
                                             ReusedSubquery [d_date_sk] #1
                                     InputAdapter
                                       ReusedExchange [i_item_sk,i_item_id] #5
@@ -87,7 +87,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                 Filter [wr_item_sk]
                                   ColumnarToRow
                                     InputAdapter
-                                      Scan parquet default.web_returns [wr_item_sk,wr_return_quantity,wr_returned_date_sk]
+                                      Scan parquet spark_catalog.default.web_returns [wr_item_sk,wr_return_quantity,wr_returned_date_sk]
                                         ReusedSubquery [d_date_sk] #1
                                 InputAdapter
                                   ReusedExchange [i_item_sk,i_item_id] #5
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/explain.txt
index 885aace0dc8..e6a65be7ec4 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/explain.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/explain.txt
@@ -13,12 +13,12 @@ TakeOrderedAndProject (46)
       :     :              :  +- * BroadcastHashJoin Inner BuildRight (5)
       :     :              :     :- * Filter (3)
       :     :              :     :  +- * ColumnarToRow (2)
-      :     :              :     :     +- Scan parquet default.store_returns (1)
+      :     :              :     :     +- Scan parquet spark_catalog.default.store_returns (1)
       :     :              :     +- ReusedExchange (4)
       :     :              +- BroadcastExchange (10)
       :     :                 +- * Filter (9)
       :     :                    +- * ColumnarToRow (8)
-      :     :                       +- Scan parquet default.item (7)
+      :     :                       +- Scan parquet spark_catalog.default.item (7)
       :     +- BroadcastExchange (28)
       :        +- * HashAggregate (27)
       :           +- Exchange (26)
@@ -29,7 +29,7 @@ TakeOrderedAndProject (46)
       :                       :  +- * BroadcastHashJoin Inner BuildRight (20)
       :                       :     :- * Filter (18)
       :                       :     :  +- * ColumnarToRow (17)
-      :                       :     :     +- Scan parquet default.catalog_returns (16)
+      :                       :     :     +- Scan parquet spark_catalog.default.catalog_returns (16)
       :                       :     +- ReusedExchange (19)
       :                       +- ReusedExchange (22)
       +- BroadcastExchange (43)
@@ -42,12 +42,12 @@ TakeOrderedAndProject (46)
                         :  +- * BroadcastHashJoin Inner BuildRight (35)
                         :     :- * Filter (33)
                         :     :  +- * ColumnarToRow (32)
-                        :     :     +- Scan parquet default.web_returns (31)
+                        :     :     +- Scan parquet spark_catalog.default.web_returns (31)
                         :     +- ReusedExchange (34)
                         +- ReusedExchange (37)
 
 
-(1) Scan parquet default.store_returns
+(1) Scan parquet spark_catalog.default.store_returns
 Output [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -74,7 +74,7 @@ Join condition: None
 Output [2]: [sr_item_sk#1, sr_return_quantity#2]
 Input [4]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3, d_date_sk#5]
 
-(7) Scan parquet default.item
+(7) Scan parquet spark_catalog.default.item
 Output [2]: [i_item_sk#6, i_item_id#7]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -119,7 +119,7 @@ Functions [1]: [sum(sr_return_quantity#2)]
 Aggregate Attributes [1]: [sum(sr_return_quantity#2)#10]
 Results [2]: [i_item_id#7 AS item_id#11, sum(sr_return_quantity#2)#10 AS sr_item_qty#12]
 
-(16) Scan parquet default.catalog_returns
+(16) Scan parquet spark_catalog.default.catalog_returns
 Output [3]: [cr_item_sk#13, cr_return_quantity#14, cr_returned_date_sk#15]
 Batched: true
 Location: InMemoryFileIndex []
@@ -189,7 +189,7 @@ Join condition: None
 Output [3]: [item_id#11, sr_item_qty#12, cr_item_qty#23]
 Input [4]: [item_id#11, sr_item_qty#12, item_id#22, cr_item_qty#23]
 
-(31) Scan parquet default.web_returns
+(31) Scan parquet spark_catalog.default.web_returns
 Output [3]: [wr_item_sk#24, wr_return_quantity#25, wr_returned_date_sk#26]
 Batched: true
 Location: InMemoryFileIndex []
@@ -271,20 +271,20 @@ BroadcastExchange (62)
    +- * BroadcastHashJoin LeftSemi BuildRight (60)
       :- * Filter (49)
       :  +- * ColumnarToRow (48)
-      :     +- Scan parquet default.date_dim (47)
+      :     +- Scan parquet spark_catalog.default.date_dim (47)
       +- BroadcastExchange (59)
          +- * Project (58)
             +- * BroadcastHashJoin LeftSemi BuildRight (57)
                :- * ColumnarToRow (51)
-               :  +- Scan parquet default.date_dim (50)
+               :  +- Scan parquet spark_catalog.default.date_dim (50)
                +- BroadcastExchange (56)
                   +- * Project (55)
                      +- * Filter (54)
                         +- * ColumnarToRow (53)
-                           +- Scan parquet default.date_dim (52)
+                           +- Scan parquet spark_catalog.default.date_dim (52)
 
 
-(47) Scan parquet default.date_dim
+(47) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#5, d_date#39]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -298,7 +298,7 @@ Input [2]: [d_date_sk#5, d_date#39]
 Input [2]: [d_date_sk#5, d_date#39]
 Condition : isnotnull(d_date_sk#5)
 
-(50) Scan parquet default.date_dim
+(50) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date#40, d_week_seq#41]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -307,7 +307,7 @@ ReadSchema: struct<d_date:date,d_week_seq:int>
 (51) ColumnarToRow [codegen id : 2]
 Input [2]: [d_date#40, d_week_seq#41]
 
-(52) Scan parquet default.date_dim
+(52) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date#42, d_week_seq#43]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/simplified.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/simplified.txt
index 7f385033637..0026109bc25 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/simplified.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83.sf100.ansi/simplified.txt
@@ -16,7 +16,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                               Filter [sr_item_sk]
                                 ColumnarToRow
                                   InputAdapter
-                                    Scan parquet default.store_returns [sr_item_sk,sr_return_quantity,sr_returned_date_sk]
+                                    Scan parquet spark_catalog.default.store_returns [sr_item_sk,sr_return_quantity,sr_returned_date_sk]
                                       SubqueryBroadcast [d_date_sk] #1
                                         BroadcastExchange #2
                                           WholeStageCodegen (3)
@@ -25,7 +25,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                                 Filter [d_date_sk]
                                                   ColumnarToRow
                                                     InputAdapter
-                                                      Scan parquet default.date_dim [d_date_sk,d_date]
+                                                      Scan parquet spark_catalog.default.date_dim [d_date_sk,d_date]
                                                 InputAdapter
                                                   BroadcastExchange #3
                                                     WholeStageCodegen (2)
@@ -33,7 +33,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                                         BroadcastHashJoin [d_week_seq,d_week_seq]
                                                           ColumnarToRow
                                                             InputAdapter
-                                                              Scan parquet default.date_dim [d_date,d_week_seq]
+                                                              Scan parquet spark_catalog.default.date_dim [d_date,d_week_seq]
                                                           InputAdapter
                                                             BroadcastExchange #4
                                                               WholeStageCodegen (1)
@@ -41,7 +41,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                                                   Filter [d_date]
                                                                     ColumnarToRow
                                                                       InputAdapter
-                                                                        Scan parquet default.date_dim [d_date,d_week_seq]
+                                                                        Scan parquet spark_catalog.default.date_dim [d_date,d_week_seq]
                               InputAdapter
                                 ReusedExchange [d_date_sk] #2
                           InputAdapter
@@ -50,7 +50,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                 Filter [i_item_sk,i_item_id]
                                   ColumnarToRow
                                     InputAdapter
-                                      Scan parquet default.item [i_item_sk,i_item_id]
+                                      Scan parquet spark_catalog.default.item [i_item_sk,i_item_id]
             InputAdapter
               BroadcastExchange #6
                 WholeStageCodegen (11)
@@ -66,7 +66,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                     Filter [cr_item_sk]
                                       ColumnarToRow
                                         InputAdapter
-                                          Scan parquet default.catalog_returns [cr_item_sk,cr_return_quantity,cr_returned_date_sk]
+                                          Scan parquet spark_catalog.default.catalog_returns [cr_item_sk,cr_return_quantity,cr_returned_date_sk]
                                             ReusedSubquery [d_date_sk] #1
                                     InputAdapter
                                       ReusedExchange [d_date_sk] #2
@@ -87,7 +87,7 @@ TakeOrderedAndProject [item_id,sr_item_qty,sr_dev,cr_item_qty,cr_dev,wr_item_qty
                                 Filter [wr_item_sk]
                                   ColumnarToRow
                                     InputAdapter
-                                      Scan parquet default.web_returns [wr_item_sk,wr_return_quantity,wr_returned_date_sk]
+                                      Scan parquet spark_catalog.default.web_returns [wr_item_sk,wr_return_quantity,wr_returned_date_sk]
                                         ReusedSubquery [d_date_sk] #1
                                 InputAdapter
                                   ReusedExchange [d_date_sk] #2

