This is an automated email from the ASF dual-hosted git repository.

ptoth pushed a commit to branch branch-4.1
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.1 by this push:
     new ad9766e6a100 [SPARK-55033][SQL] Fix stringArgs of DSv2 writing commands
ad9766e6a100 is described below

commit ad9766e6a10062e1464ebcc3d12208135a3fa6ee
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Jan 19 10:12:13 2026 +0100

    [SPARK-55033][SQL] Fix stringArgs of DSv2 writing commands
    
    ### What changes were proposed in this pull request?
    
    Replace `val stringArgs` with `def stringArgs` in the DSv2 writing commands,
    because each call to `stringArgs` must return a fresh `Iterator`; otherwise
    the first consumer exhausts the cached iterator and every subsequent
    consumer gets an empty result.
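    
    A minimal sketch of the underlying `Iterator` semantics (illustrative, not
    part of the patch):
    
    ```scala
    // A val evaluates the right-hand side once and shares the single Iterator.
    val cached: Iterator[Any] = Iterator("table", "query")
    cached.mkString(", ") // "table, query"
    cached.mkString(", ") // ""  -- already exhausted for the second consumer
    
    // A def re-evaluates on every call, so each consumer gets a fresh Iterator.
    def fresh: Iterator[Any] = Iterator("table", "query")
    fresh.mkString(", ")  // "table, query"
    fresh.mkString(", ")  // "table, query"
    ```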
    
    ### Why are the changes needed?
    
    Fixes the EXPLAIN output and the SQL UI display of the affected plan nodes.
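    
    For context, a simplified model of the rendering path (illustrative, not
    Spark's exact `TreeNode` code) shows how a cached iterator breaks the
    second render:
    
    ```scala
    // Plan nodes render their arguments by walking stringArgs.
    trait Node extends Product {
      def stringArgs: Iterator[Any] = productIterator
      def argString: String = stringArgs.mkString(", ")
    }
    
    case class Append(table: String, query: String) extends Node {
      // Bug pattern: the val caches one Iterator for the node's lifetime.
      override val stringArgs: Iterator[Any] = Iterator(table, query)
    }
    
    val node = Append("t1", "Project [a]")
    node.argString // "t1, Project [a]" -- first render (EXPLAIN) drains it
    node.argString // ""                -- second render (SQL UI) shows nothing
    ```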
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes. For example, without this fix some `AppendData` nodes in the UI were
    missing their `Arguments: ...` section; this patch makes them display
    consistently.
    
    ### How was this patch tested?
    
    Manually: INSERT INTO an Iceberg table and compare the UI before vs. after
    (screenshots and a sketch of the check below).
    
    <img width="389" height="86" alt="image" 
src="https://github.com/user-attachments/assets/3bdb18e2-b605-497f-9431-ae416c340541";
 />
    
    <img width="844" height="110" alt="image" 
src="https://github.com/user-attachments/assets/4ed3ea3f-4fcd-4b93-a1fd-2714c3fead12";
 />
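    
    A hedged sketch of the manual check (the catalog and table names below are
    illustrative, not taken from the patch):
    
    ```scala
    // Assumes a session with an Iceberg catalog named `local` configured.
    spark.sql("CREATE TABLE local.db.t (id BIGINT) USING iceberg")
    spark.sql("INSERT INTO local.db.t VALUES (1), (2)")
    // In the SQL tab of the Spark UI: before the fix, the AppendData node
    // could be missing its `Arguments: ...` line; after, it always renders.
    ```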
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #53794 from pan3793/SPARK-55033.
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Peter Toth <[email protected]>
    (cherry picked from commit 7a0ea39fd6a902e2b4f37e80ce82ba69d84185d9)
    Signed-off-by: Peter Toth <[email protected]>
---
 .../org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala      | 4 ++--
 .../spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala  | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index fab64d771093..17c5eb1593bf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -257,7 +257,7 @@ case class ReplaceData(
     write: Option[Write] = None) extends RowLevelWrite {
 
   override val isByName: Boolean = false
-  override val stringArgs: Iterator[Any] = Iterator(table, query, write)
+  override def stringArgs: Iterator[Any] = Iterator(table, query, write)
 
   override lazy val references: AttributeSet = query.outputSet
 
@@ -339,7 +339,7 @@ case class WriteDelta(
     write: Option[DeltaWrite] = None) extends RowLevelWrite {
 
   override val isByName: Boolean = false
-  override val stringArgs: Iterator[Any] = Iterator(table, query, write)
+  override def stringArgs: Iterator[Any] = Iterator(table, query, write)
 
   override lazy val references: AttributeSet = query.outputSet
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
index 3e4a2f792a1c..464f0d9658d1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
@@ -355,7 +355,7 @@ case class WriteToDataSourceV2Exec(
     query: SparkPlan,
     writeMetrics: Seq[CustomMetric]) extends V2TableWriteExec {
 
-  override val stringArgs: Iterator[Any] = Iterator(batchWrite, query)
+  override def stringArgs: Iterator[Any] = Iterator(batchWrite, query)
 
   override val customMetrics: Map[String, SQLMetric] = writeMetrics.map { customMetric =>
     customMetric.name() -> SQLMetrics.createV2CustomMetric(sparkContext, customMetric)
@@ -375,7 +375,7 @@ trait V2ExistingTableWriteExec extends V2TableWriteExec {
   def refreshCache: () => Unit
   def write: Write
 
-  override val stringArgs: Iterator[Any] = Iterator(query, write)
+  override def stringArgs: Iterator[Any] = Iterator(query, write)
 
   override val customMetrics: Map[String, SQLMetric] =
     write.supportedCustomMetrics().map { customMetric =>

