ion-elgreco commented on issue #9895:
URL: https://github.com/apache/arrow-datafusion/issues/9895#issuecomment-2029639071
@mustafasrepo not sure if this is the full plan, but this is what I get when I call `dbg!` on the filter:
```rust
[crates/core/src/operations/delete.rs:158:5] filter.clone() = FilterExec {
predicate: NotExpr {
arg: BinaryExpr {
left: BinaryExpr {
left: GetIndexedFieldExpr {
arg: Column {
name: "props",
index: 1,
},
field: NamedStructField {
name: Utf8("a"),
},
},
op: Eq,
right: Literal {
value: Utf8("2021-02-02"),
},
},
op: IsNotDistinctFrom,
right: Literal {
value: Boolean(true),
},
},
},
input: DeltaScan {
table_uri: "memory://",
config: DeltaScanConfig {
file_column_name: None,
wrap_partition_values: false,
enable_parquet_pushdown: false,
},
parquet_scan: ParquetExec {
pushdown_filters: None,
reorder_filters: None,
enable_page_index: None,
enable_bloom_filter: None,
base_config: object_store_url=ObjectStoreUrl { url: Url {
scheme: "delta-rs", cannot_be_a_base: false, username: "", password: None,
host: Some(Domain("memory--")), port: None, path: "/", query: None, fragment:
None } }, statistics=Statistics { num_rows: Exact(4), total_byte_size:
Inexact(898), column_statistics: [ColumnStatistics { null_count: Exact(0),
max_value: Absent, min_value: Absent, distinct_count: Absent },
ColumnStatistics { null_count: Absent, max_value: Absent, min_value: Absent,
distinct_count: Absent }] }, file_groups={1 group:
[[part-00001-ae8f2178-b18d-4bc8-b333-3e73be1076cb-c000.snappy.parquet]]},
projection=[id, props],
projected_statistics: Statistics {
num_rows: Exact(4),
total_byte_size: Inexact(898),
column_statistics: [
ColumnStatistics {
null_count: Exact(0),
max_value: Absent,
min_value: Absent,
distinct_count: Absent,
},
ColumnStatistics {
null_count: Absent,
max_value: Absent,
min_value: Absent,
distinct_count: Absent,
},
],
},
projected_schema: Schema {
fields: [
Field {
name: "id",
data_type: Utf8,
nullable: true,
dict_id: 0,
dict_is_ordered: false,
metadata: {},
},
Field {
name: "props",
data_type: Struct(
[
Field {
name: "a",
data_type: Utf8,
nullable: true,
dict_id: 0,
dict_is_ordered: false,
metadata: {},
},
],
),
nullable: true,
dict_id: 0,
dict_is_ordered: false,
metadata: {},
},
],
metadata: {},
},
projected_output_ordering: [],
metrics: ExecutionPlanMetricsSet {
inner: Mutex {
data: MetricsSet {
metrics: [
Metric {
value: Count {
name: "num_predicate_creation_errors",
count: Count {
value: 0,
},
},
labels: [],
partition: None,
},
],
},
},
},
predicate: None,
pruning_predicate: None,
page_pruning_predicate: None,
metadata_size_hint: None,
parquet_file_reader_factory: None,
},
logical_schema: Schema {
fields: [
Field {
name: "id",
data_type: Utf8,
nullable: true,
dict_id: 0,
dict_is_ordered: false,
metadata: {},
},
Field {
name: "props",
data_type: Struct(
[
Field {
name: "a",
data_type: Utf8,
nullable: true,
dict_id: 0,
dict_is_ordered: false,
metadata: {},
},
],
),
nullable: true,
dict_id: 0,
dict_is_ordered: false,
metadata: {},
},
],
metadata: {},
},
},
metrics: ExecutionPlanMetricsSet {
inner: Mutex {
data: MetricsSet {
metrics: [],
},
},
},
default_selectivity: 20,
}
```
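For reference, here is a self-contained sketch of the kind of call that produces a dump like this. It swaps the actual Delta table and `DeltaScan` from the issue for a plain `MemoryExec` and a made-up `id = 2` predicate, so everything beyond the `dbg!(filter.clone())` pattern is an illustrative assumption:
```rust
use std::sync::Arc;

use datafusion::arrow::array::Int32Array;
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::error::Result;
use datafusion::logical_expr::Operator;
use datafusion::physical_expr::expressions::{col, lit, BinaryExpr};
use datafusion::physical_plan::filter::FilterExec;
use datafusion::physical_plan::memory::MemoryExec;

fn main() -> Result<()> {
    // A one-column, one-partition in-memory table standing in for the scan.
    let schema = Arc::new(Schema::new(vec![Field::new("id", DataType::Int32, false)]));
    let batch = RecordBatch::try_new(
        schema.clone(),
        vec![Arc::new(Int32Array::from(vec![1, 2, 3, 4]))],
    )?;
    let scan = Arc::new(MemoryExec::try_new(&[vec![batch]], schema.clone(), None)?);

    // Physical predicate `id = 2` wrapped in a FilterExec, analogous to the
    // DELETE predicate in the dump above.
    let predicate = Arc::new(BinaryExpr::new(col("id", &schema)?, Operator::Eq, lit(2)));
    let filter = Arc::new(FilterExec::try_new(predicate, scan)?);

    // Debug-print the whole physical plan tree; this is what produces output
    // in the shape shown above.
    dbg!(filter.clone());
    Ok(())
}
```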
This is, by the way, the `fn children` implementation; I don't see anything weird when I look at what the trait implementation shows:
```rust
pub struct DeltaScan {
/// The URL of the ObjectStore root
pub table_uri: String,
/// Column that contains an index that maps to the original metadata Add
pub config: DeltaScanConfig,
/// The parquet scan to wrap
pub parquet_scan: Arc<dyn ExecutionPlan>,
/// The schema of the table to be used when evaluating expressions
pub logical_schema: Arc<ArrowSchema>,
}
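
// The two methods below are from DeltaScan's `ExecutionPlan` impl
// (the surrounding `impl ExecutionPlan for DeltaScan { .. }` block is elided here).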
fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {
vec![self.parquet_scan.clone()]
}
fn with_new_children(
self: Arc<Self>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DataFusionResult<Arc<dyn ExecutionPlan>> {
ExecutionPlan::with_new_children(self.parquet_scan.clone(), children)
}
```
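For context, here is a minimal sketch of how an optimizer pass typically walks these two methods when rewriting a plan; the `rebuild` helper is an illustrative assumption, not DataFusion's actual rule code:
```rust
use std::sync::Arc;

use datafusion::error::Result as DataFusionResult;
use datafusion::physical_plan::ExecutionPlan;

/// Illustrative helper: gather a node's children and hand them back through
/// `with_new_children`, the pattern optimizer rules follow when rewriting a plan.
/// With the `DeltaScan` methods quoted above, the rebuilt node would come from
/// the wrapped `parquet_scan`, since `with_new_children` delegates to it.
fn rebuild(plan: Arc<dyn ExecutionPlan>) -> DataFusionResult<Arc<dyn ExecutionPlan>> {
    let children = plan.children();
    if children.is_empty() {
        // Leaf nodes have nothing to rebuild.
        Ok(plan)
    } else {
        // A real rule would rewrite `children` first; here they are passed back unchanged.
        plan.with_new_children(children)
    }
}
```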