This is an automated email from the ASF dual-hosted git repository.

milenkovicm pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-ballista.git
The following commit(s) were added to refs/heads/main by this push:
new ec4a720d Make `DisplayAs` consistent and more readable ... (#1347)
ec4a720d is described below
commit ec4a720d62de6ffc7072b264ea649a2f63f56b50
Author: Marko Milenković <[email protected]>
AuthorDate: Tue Dec 9 14:35:44 2025 +0000
Make `DisplayAs` consistent and more readable ... (#1347)
... for Shuffle[Reader|Writer|Unresolved].
---
ballista/client/tests/context_checks.rs | 74 +++++++++++-----------
.../core/src/execution_plans/shuffle_reader.rs | 4 +-
.../core/src/execution_plans/shuffle_writer.rs | 14 +++-
.../core/src/execution_plans/unresolved_shuffle.rs | 4 +-
4 files changed, 53 insertions(+), 43 deletions(-)
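
The change is cosmetic: the Default/Verbose `DisplayAs` output of ShuffleReaderExec, ShuffleWriterExec and UnresolvedShuffleExec now uses a uniform "partitioning: " label and the `Display` form of the partitioning instead of its `Debug` form (TreeRender keeps the `partitioning=` prefix but also switches from Debug to Display). ShuffleWriterExec additionally unwraps its optional output partitioning, so plans print e.g. `Hash([id@0], 16)` or `None` rather than `Some(Hash([Column { name: "id", index: 0 }], 16))`. Below is a minimal, self-contained sketch of that Debug-vs-Display difference; it uses a hypothetical stand-in type, not DataFusion's real `Partitioning`:

    // Hypothetical stand-in for DataFusion's `Partitioning`, used only to
    // illustrate the Debug-vs-Display formatting difference in this commit.
    use std::fmt;

    #[derive(Debug)]
    struct HashPartitioning {
        columns: Vec<String>,
        partitions: usize,
    }

    impl fmt::Display for HashPartitioning {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "Hash([{}], {})", self.columns.join(", "), self.partitions)
        }
    }

    fn main() {
        let partitioning = Some(HashPartitioning {
            columns: vec!["id@0".to_string()],
            partitions: 16,
        });
        let missing: Option<HashPartitioning> = None;

        // Old style: Debug-format the Option itself, as `{:?}` did before this commit.
        println!("ShuffleWriterExec: partitioning:{:?}", partitioning);

        // New style: Display the inner value, or the literal "None" when absent.
        println!(
            "ShuffleWriterExec: partitioning: {}",
            partitioning
                .as_ref()
                .map(|p| p.to_string())
                .unwrap_or("None".to_string())
        );
        println!(
            "ShuffleWriterExec: partitioning: {}",
            missing
                .as_ref()
                .map(|p| p.to_string())
                .unwrap_or("None".to_string())
        );
    }

Running the sketch prints `partitioning:Some(HashPartitioning { columns: ["id@0"], partitions: 16 })` in the old style and `partitioning: Hash([id@0], 16)` / `partitioning: None` in the new style, which is what the updated context_checks.rs expectations below reflect.
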
diff --git a/ballista/client/tests/context_checks.rs b/ballista/client/tests/context_checks.rs
index a8508f72..deba3bc3 100644
--- a/ballista/client/tests/context_checks.rs
+++ b/ballista/client/tests/context_checks.rs
@@ -998,43 +998,43 @@ mod supported {
.unwrap();
let expected: Vec<&str> = vec![
- "+------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+",
- "| plan_type        | plan |",
- "+------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+",
- "| logical_plan     | Projection: count(Int64(1)) AS count(*), id |",
- "|                  | Aggregate: groupBy=[[id]], aggr=[[count(Int64(1))]] |",
- "|                  | Projection: __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)),depth=1) AS UNNEST(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5))) AS id |",
- "|                  | Unnest: lists[__unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)))|depth=1] structs[] |",
- "|                  | Projection: List([1, 2, 3, 4, 5]) AS __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5))) |",
- "|                  | EmptyRelation: rows=1 |",
- "| physical_plan    | ProjectionExec: expr=[count(Int64(1))@1 as count(*), id@0 as id] |",
- "|                  | AggregateExec: mode=FinalPartitioned, gby=[id@0 as id], aggr=[count(Int64(1))] |",
- "|                  | CoalesceBatchesExec: target_batch_size=8192 |",
- "|                  | RepartitionExec: partitioning=Hash([id@0], 16), input_partitions=1 |",
- "|                  | AggregateExec: mode=Partial, gby=[id@0 as id], aggr=[count(Int64(1))] |",
- "|                  | ProjectionExec: expr=[__unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)),depth=1)@0 as id] |",
- "|                  | UnnestExec |",
- "|                  | ProjectionExec: expr=[[1, 2, 3, 4, 5] as __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)))] |",
- "|                  | PlaceholderRowExec |",
- "|                  | |",
- "| distributed_plan | =========ResolvedStage[stage_id=1.0, partitions=1]========= |",
- "|                  | ShuffleWriterExec: partitioning:Some(Hash([Column { name: \"id\", index: 0 }], 16)) |",
- "|                  | AggregateExec: mode=Partial, gby=[id@0 as id], aggr=[count(Int64(1))] |",
- "|                  | ProjectionExec: expr=[__unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)),depth=1)@0 as id] |",
- "|                  | UnnestExec |",
- "|                  | ProjectionExec: expr=[[1, 2, 3, 4, 5] as __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)))] |",
- "|                  | PlaceholderRowExec |",
- "|                  | |",
- "|                  | =========UnResolvedStage[stage_id=2.0, children=1]========= |",
- "|                  | Inputs{1: StageOutput { partition_locations: {}, complete: false }} |",
- "|                  | ShuffleWriterExec: partitioning:None |",
- "|                  | ProjectionExec: expr=[count(Int64(1))@1 as count(*), id@0 as id] |",
- "|                  | AggregateExec: mode=FinalPartitioned, gby=[id@0 as id], aggr=[count(Int64(1))] |",
- "|                  | CoalesceBatchesExec: target_batch_size=8192 |",
- "|                  | UnresolvedShuffleExec: partitioning=Hash([Column { name: \"id\", index: 0 }], 16) |",
- "|                  | |",
- "|                  | |",
- "+------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+",
+ "+------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+",
+ "| plan_type        | plan |",
+ "+------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+",
+ "| logical_plan     | Projection: count(Int64(1)) AS count(*), id |",
+ "|                  | Aggregate: groupBy=[[id]], aggr=[[count(Int64(1))]] |",
+ "|                  | Projection: __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)),depth=1) AS UNNEST(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5))) AS id |",
+ "|                  | Unnest: lists[__unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)))|depth=1] structs[] |",
+ "|                  | Projection: List([1, 2, 3, 4, 5]) AS __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5))) |",
+ "|                  | EmptyRelation: rows=1 |",
+ "| physical_plan    | ProjectionExec: expr=[count(Int64(1))@1 as count(*), id@0 as id] |",
+ "|                  | AggregateExec: mode=FinalPartitioned, gby=[id@0 as id], aggr=[count(Int64(1))] |",
+ "|                  | CoalesceBatchesExec: target_batch_size=8192 |",
+ "|                  | RepartitionExec: partitioning=Hash([id@0], 16), input_partitions=1 |",
+ "|                  | AggregateExec: mode=Partial, gby=[id@0 as id], aggr=[count(Int64(1))] |",
+ "|                  | ProjectionExec: expr=[__unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)),depth=1)@0 as id] |",
+ "|                  | UnnestExec |",
+ "|                  | ProjectionExec: expr=[[1, 2, 3, 4, 5] as __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)))] |",
+ "|                  | PlaceholderRowExec |",
+ "|                  | |",
+ "| distributed_plan | =========ResolvedStage[stage_id=1.0, partitions=1]========= |",
+ "|                  | ShuffleWriterExec: partitioning: Hash([id@0], 16) |",
+ "|                  | AggregateExec: mode=Partial, gby=[id@0 as id], aggr=[count(Int64(1))] |",
+ "|                  | ProjectionExec: expr=[__unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)),depth=1)@0 as id] |",
+ "|                  | UnnestExec |",
+ "|                  | ProjectionExec: expr=[[1, 2, 3, 4, 5] as __unnest_placeholder(make_array(Int64(1),Int64(2),Int64(3),Int64(4),Int64(5)))] |",
+ "|                  | PlaceholderRowExec |",
+ "|                  | |",
+ "|                  | =========UnResolvedStage[stage_id=2.0, children=1]========= |",
+ "|                  | Inputs{1: StageOutput { partition_locations: {}, complete: false }} |",
+ "|                  | ShuffleWriterExec: partitioning: None |",
+ "|                  | ProjectionExec: expr=[count(Int64(1))@1 as count(*), id@0 as id] |",
+ "|                  | AggregateExec: mode=FinalPartitioned, gby=[id@0 as id], aggr=[count(Int64(1))] |",
+ "|                  | CoalesceBatchesExec: target_batch_size=8192 |",
+ "|                  | UnresolvedShuffleExec: partitioning: Hash([id@0], 16) |",
+ "|                  | |",
+ "|                  | |",
+ "+------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+",
];
assert_batches_eq!(expected, &result);
}
diff --git a/ballista/core/src/execution_plans/shuffle_reader.rs b/ballista/core/src/execution_plans/shuffle_reader.rs
index 617654ec..0635d310 100644
--- a/ballista/core/src/execution_plans/shuffle_reader.rs
+++ b/ballista/core/src/execution_plans/shuffle_reader.rs
@@ -103,12 +103,12 @@ impl DisplayAs for ShuffleReaderExec {
DisplayFormatType::Default | DisplayFormatType::Verbose => {
write!(
f,
- "ShuffleReaderExec: partitioning={:?}",
+ "ShuffleReaderExec: partitioning: {}",
self.properties.partitioning,
)
}
DisplayFormatType::TreeRender => {
- write!(f, "partitioning={:?}", self.properties.partitioning)
+ write!(f, "partitioning={}", self.properties.partitioning)
}
}
}
diff --git a/ballista/core/src/execution_plans/shuffle_writer.rs b/ballista/core/src/execution_plans/shuffle_writer.rs
index 31c2c1a6..c1be52e3 100644
--- a/ballista/core/src/execution_plans/shuffle_writer.rs
+++ b/ballista/core/src/execution_plans/shuffle_writer.rs
@@ -359,12 +359,22 @@ impl DisplayAs for ShuffleWriterExec {
DisplayFormatType::Default | DisplayFormatType::Verbose => {
write!(
f,
- "ShuffleWriterExec: partitioning:{:?}",
+ "ShuffleWriterExec: partitioning: {}",
self.shuffle_output_partitioning
+ .as_ref()
+ .map(|p| p.to_string())
+ .unwrap_or("None".to_string())
)
}
DisplayFormatType::TreeRender => {
- write!(f, "partitioning={:?}",
self.shuffle_output_partitioning)
+ write!(
+ f,
+ "partitioning={}",
+ self.shuffle_output_partitioning
+ .as_ref()
+ .map(|p| p.to_string())
+ .unwrap_or("None".to_string())
+ )
}
}
}
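
With the Option unwrapped, the writer node now renders as `ShuffleWriterExec: partitioning: Hash([id@0], 16)` when an output partitioning is set and as `ShuffleWriterExec: partitioning: None` otherwise, as the updated context_checks.rs expectations above show; the previous `{:?}` form leaked the `Some(...)` wrapper and the full `Column { name: "id", index: 0 }` debug representation into EXPLAIN output.
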
diff --git a/ballista/core/src/execution_plans/unresolved_shuffle.rs b/ballista/core/src/execution_plans/unresolved_shuffle.rs
index d70c66fa..2b709d4a 100644
--- a/ballista/core/src/execution_plans/unresolved_shuffle.rs
+++ b/ballista/core/src/execution_plans/unresolved_shuffle.rs
@@ -72,14 +72,14 @@ impl DisplayAs for UnresolvedShuffleExec {
DisplayFormatType::Default | DisplayFormatType::Verbose => {
write!(
f,
- "UnresolvedShuffleExec: partitioning={:?}",
+ "UnresolvedShuffleExec: partitioning: {}",
self.properties().output_partitioning()
)
}
DisplayFormatType::TreeRender => {
write!(
f,
- "partitioning={:?}",
+ "partitioning={}",
self.properties().output_partitioning()
)
}