This is an automated email from the ASF dual-hosted git repository.
sarutak pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.5 by this push:
new 439b7a1f74aa [SPARK-55033][SQL][3.5] Fix stringArgs of DSv2 writing commands
439b7a1f74aa is described below
commit 439b7a1f74aaee5a790542e7a55725d1fa5345e9
Author: Cheng Pan <[email protected]>
AuthorDate: Tue Jan 20 03:35:18 2026 +0900
[SPARK-55033][SQL][3.5] Fix stringArgs of DSv2 writing commands
Backport #53794 to branch-3.5
### What changes were proposed in this pull request?
Replace `val stringArgs` with `def stringArgs` in the v2 writing commands: each call to `stringArgs` must return a brand-new `Iterator`, because an `Iterator` bound to a `val` is exhausted by its first consumer, leaving every subsequent consumer with an empty result.
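For illustration, here is a minimal standalone sketch (not the Spark code itself; the class and member names below are made up) of why a `val`-bound `Iterator` fails on the second read while a `def` does not:

```scala
// Minimal sketch: an Iterator is a one-shot cursor. Binding it to a `val`
// means every caller shares the same, progressively exhausted instance,
// while a `def` builds a fresh Iterator on every call.
object StringArgsSketch {
  class WithVal {
    val stringArgs: Iterator[Any] = Iterator("table", "query", "write")
  }
  class WithDef {
    def stringArgs: Iterator[Any] = Iterator("table", "query", "write")
  }

  def main(args: Array[String]): Unit = {
    val v = new WithVal
    println(v.stringArgs.mkString(", ")) // table, query, write
    println(v.stringArgs.mkString(", ")) // empty: the single Iterator is already drained

    val d = new WithDef
    println(d.stringArgs.mkString(", ")) // table, query, write
    println(d.stringArgs.mkString(", ")) // table, query, write: a new Iterator per call
  }
}
```

Plan-string rendering consumes `stringArgs` each time a node is formatted (EXPLAIN output, the SQL UI, etc.), so the second and later renderings saw an exhausted iterator.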
### Why are the changes needed?
Fix EXPLAIN / UI display.
### Does this PR introduce _any_ user-facing change?
Yes. For example, without this fix some `AppendData` nodes in the UI lack the `Arguments: ...` line they should show; with this patch they display correctly and consistently.
### How was this patch tested?
INSERT INTO an Iceberg table and check the UI, before vs. after.
<img width="389" height="86" alt="image"
src="https://github.com/user-attachments/assets/3bdb18e2-b605-497f-9431-ae416c340541"
/>
<img width="844" height="110" alt="image"
src="https://github.com/user-attachments/assets/4ed3ea3f-4fcd-4b93-a1fd-2714c3fead12"
/>
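As a rough reproduction recipe (a hedged sketch: the catalog and table names below are hypothetical, and any DSv2 table, e.g. Iceberg, should exhibit the same behavior):

```scala
// Hedged sketch: assumes `spark` is a SparkSession with an Iceberg catalog
// named `demo` configured; the table name is made up for illustration.
spark.sql("CREATE TABLE demo.db.t (id INT) USING iceberg")
spark.sql("INSERT INTO demo.db.t VALUES (1), (2)")
// Then open the SQL tab of the Spark UI: before the fix, the write node could
// be missing its `Arguments: ...` line because its plan string had already
// been rendered once and the val-bound Iterator was drained.
```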
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #53855 from pan3793/SPARK-55033-3.5.
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Kousuke Saruta <[email protected]>
---
.../org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala | 4 ++--
.../spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index d7669ac0b1d7..714f0f62a050 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -226,7 +226,7 @@ case class ReplaceData(
     write: Option[Write] = None) extends RowLevelWrite {

   override val isByName: Boolean = false

-  override val stringArgs: Iterator[Any] = Iterator(table, query, write)
+  override def stringArgs: Iterator[Any] = Iterator(table, query, write)

   override lazy val references: AttributeSet = query.outputSet
@@ -304,7 +304,7 @@ case class WriteDelta(
     write: Option[DeltaWrite] = None) extends RowLevelWrite {

   override val isByName: Boolean = false

-  override val stringArgs: Iterator[Any] = Iterator(table, query, write)
+  override def stringArgs: Iterator[Any] = Iterator(table, query, write)

   override lazy val references: AttributeSet = query.outputSet
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
index 12ab7c57ea51..fa74bace2360 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
@@ -278,7 +278,7 @@ case class ReplaceDataExec(
     refreshCache: () => Unit,
     write: Write) extends V2ExistingTableWriteExec {

-  override val stringArgs: Iterator[Any] = Iterator(query, write)
+  override def stringArgs: Iterator[Any] = Iterator(query, write)

   override protected def withNewChildInternal(newChild: SparkPlan): ReplaceDataExec = {
     copy(query = newChild)
@@ -294,7 +294,7 @@ case class WriteDeltaExec(
     projections: WriteDeltaProjections,
     write: DeltaWrite) extends V2ExistingTableWriteExec {

-  override lazy val stringArgs: Iterator[Any] = Iterator(query, write)
+  override def stringArgs: Iterator[Any] = Iterator(query, write)

   override lazy val writingTask: WritingSparkTask[_] = {
     if (projections.metadataProjection.isDefined) {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]