Ted-Jiang commented on code in PR #3380: URL: https://github.com/apache/arrow-datafusion/pull/3380#discussion_r964334612
########## datafusion/core/src/physical_plan/file_format/row_filter.rs: ########## @@ -0,0 +1,367 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +use arrow::array::{Array, BooleanArray}; +use arrow::compute::prep_null_mask_filter; +use arrow::datatypes::{DataType, Field, Schema}; +use arrow::error::{ArrowError, Result as ArrowResult}; +use arrow::record_batch::RecordBatch; +use datafusion_common::{Column, Result, ScalarValue, ToDFSchema}; +use datafusion_expr::expr_rewriter::{ExprRewritable, ExprRewriter, RewriteRecursion}; + +use datafusion_expr::{Expr, Operator}; +use datafusion_physical_expr::execution_props::ExecutionProps; +use datafusion_physical_expr::{create_physical_expr, PhysicalExpr}; +use parquet::arrow::arrow_reader::{ArrowPredicate, RowFilter}; +use parquet::arrow::ProjectionMask; +use parquet::file::metadata::ParquetMetaData; +use std::collections::HashSet; +use std::sync::Arc; + +/// A predicate which can be passed to `ParquetRecordBatchStream` to perform row-level +/// filtering during parquet decoding. 
+#[derive(Debug)] +pub(crate) struct DatafusionArrowPredicate { + physical_expr: Arc<dyn PhysicalExpr>, + projection: ProjectionMask, +} + +impl DatafusionArrowPredicate { + pub fn try_new( + candidate: FilterCandidate, + schema: &Schema, + metadata: &ParquetMetaData, + ) -> Result<Self> { + let props = ExecutionProps::default(); + + let schema = schema.project(&candidate.projection)?; + let df_schema = schema.clone().to_dfschema()?; + + let physical_expr = + create_physical_expr(&candidate.expr, &df_schema, &schema, &props)?; + + Ok(Self { + physical_expr, + projection: ProjectionMask::roots( + metadata.file_metadata().schema_descr(), + candidate.projection, + ), + }) + } +} + +impl ArrowPredicate for DatafusionArrowPredicate { + fn projection(&self) -> &ProjectionMask { + &self.projection + } + + fn evaluate(&mut self, batch: RecordBatch) -> ArrowResult<BooleanArray> { + match self + .physical_expr + .evaluate(&batch) + .map(|v| v.into_array(batch.num_rows())) + { + Ok(array) => { + if let Some(mask) = array.as_any().downcast_ref::<BooleanArray>() { + let mask = match mask.null_count() { + 0 => BooleanArray::from(mask.data().clone()), + _ => prep_null_mask_filter(mask), + }; + + Ok(mask) + } else { + Err(ArrowError::ComputeError( + "Unexpected result of predicate evaluation, expected BooleanArray".to_owned(), + )) + } + } + Err(e) => Err(ArrowError::ComputeError(format!( + "Error evaluating filter predicate: {:?}", + e + ))), + } + } +} + +/// A candidate expression for creating a `RowFilter` contains the +/// expression as well as data to estimate the cost of evaluating +/// the resulting expression. +pub(crate) struct FilterCandidate { + expr: Expr, + required_bytes: usize, + can_use_index: bool, + projection: Vec<usize>, +} + +/// Helper to build a `FilterCandidate`. This will do several things +/// 1. Determine the columns required to evaluate the expression +/// 2. Calculate data required to estimate the cost of evaluating the filter +/// 3. 
Rewrite column expressions in the predicate which reference columns not in the particular file schema. +/// This is relevant in the case where we have determined the table schema by merging all individual file schemas +/// and any given file may or may not contain all columns in the merged schema. If a particular column is not present +/// we replace the column expression with a literal expression that produces a null value. +struct FilterCandidateBuilder<'a> { + expr: Expr, + file_schema: &'a Schema, + table_schema: &'a Schema, + required_columns: HashSet<Column>, + required_column_indices: Vec<usize>, + non_primitive_columns: bool, + projected_columns: bool, +} + +impl<'a> FilterCandidateBuilder<'a> { + pub fn new(expr: Expr, file_schema: &'a Schema, table_schema: &'a Schema) -> Self { + Self { + expr, + file_schema, + table_schema, + required_columns: HashSet::new(), + required_column_indices: vec![], + non_primitive_columns: false, + projected_columns: false, + } + } + + pub fn build( + mut self, + metadata: &ParquetMetaData, + ) -> Result<Option<FilterCandidate>> { + let expr = self.expr.clone(); + let expr = expr.rewrite(&mut self)?; Review Comment: why should we call this func 🤔 -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected]
