This is an automated email from the ASF dual-hosted git repository.
csy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/auron.git
The following commit(s) were added to refs/heads/master by this push:
new ce8b558a [AURON #1911][clippy] Enable -D warnings and resolve related
issues (#1922)
ce8b558a is described below
commit ce8b558aed30a6833407aa8826895c3bca55e2fd
Author: yew1eb <[email protected]>
AuthorDate: Tue Jan 20 21:33:28 2026 +0800
[AURON #1911][clippy] Enable -D warnings and resolve related issues (#1922)
# Which issue does this PR close?
Closes #1911
# Rationale for this change
# What changes are included in this PR?
1. Enable `-D warnings` for Clippy in CI, and set `lints.workspace =
true` in each sub-crate's Cargo.toml to align with the unified workspace lint configuration.
2. Resolve auto-fixable lint issues via `cargo clippy --fix`.
3. Manually exempt existing panic-related code with the
`#[allow(clippy::panic)]` annotation.
# Are there any user-facing changes?
# How was this patch tested?
---
.github/workflows/tpcds-reusable.yml | 3 +-
Cargo.toml | 85 +++++++++++
dev/mvn-build-helper/build-native.sh | 3 +-
native-engine/auron-jni-bridge/Cargo.toml | 3 +
native-engine/auron-memmgr/Cargo.toml | 3 +
native-engine/auron-memmgr/src/spill.rs | 7 +-
native-engine/auron-planner/Cargo.toml | 3 +
native-engine/auron-planner/build.rs | 4 +-
native-engine/auron-planner/src/error.rs | 16 +--
native-engine/auron-planner/src/lib.rs | 3 +-
native-engine/auron-planner/src/planner.rs | 55 +++----
native-engine/auron/Cargo.toml | 3 +
native-engine/auron/src/alloc.rs | 2 +-
native-engine/auron/src/exec.rs | 4 +-
native-engine/auron/src/logging.rs | 8 +-
native-engine/auron/src/rt.rs | 4 +-
native-engine/datafusion-ext-commons/Cargo.toml | 3 +
.../datafusion-ext-commons/src/arrow/cast.rs | 6 +-
.../datafusion-ext-commons/src/arrow/coalesce.rs | 20 +--
.../src/arrow/eq_comparator.rs | 160 ++++++++++-----------
.../datafusion-ext-commons/src/arrow/selection.rs | 14 +-
.../datafusion-ext-commons/src/hadoop_fs.rs | 2 +-
.../datafusion-ext-commons/src/hash/xxhash.rs | 2 +-
.../datafusion-ext-commons/src/io/batch_serde.rs | 18 +--
.../src/io/ipc_compression.rs | 2 +-
native-engine/datafusion-ext-commons/src/io/mod.rs | 2 +-
.../datafusion-ext-commons/src/io/scalar_serde.rs | 2 +-
native-engine/datafusion-ext-commons/src/lib.rs | 4 +-
.../datafusion-ext-commons/src/spark_bit_array.rs | 2 +-
.../datafusion-ext-commons/src/spark_hash.rs | 14 +-
native-engine/datafusion-ext-exprs/Cargo.toml | 3 +
.../src/bloom_filter_might_contain.rs | 4 +-
.../datafusion-ext-exprs/src/get_indexed_field.rs | 4 +-
.../datafusion-ext-exprs/src/get_map_value.rs | 16 +--
.../datafusion-ext-exprs/src/named_struct.rs | 2 +-
native-engine/datafusion-ext-exprs/src/row_num.rs | 2 +-
.../src/spark_scalar_subquery_wrapper.rs | 4 +-
.../datafusion-ext-exprs/src/spark_udf_wrapper.rs | 2 +-
native-engine/datafusion-ext-functions/Cargo.toml | 3 +
.../src/brickhouse/array_union.rs | 1 -
native-engine/datafusion-ext-functions/src/lib.rs | 1 +
.../datafusion-ext-functions/src/spark_bround.rs | 4 +-
.../src/spark_check_overflow.rs | 3 +-
.../datafusion-ext-functions/src/spark_crypto.rs | 1 +
.../datafusion-ext-functions/src/spark_dates.rs | 2 +-
.../src/spark_get_json_object.rs | 6 +-
.../datafusion-ext-functions/src/spark_hash.rs | 2 +-
.../datafusion-ext-functions/src/spark_initcap.rs | 2 +-
.../src/spark_make_decimal.rs | 3 +-
.../datafusion-ext-functions/src/spark_round.rs | 1 +
.../datafusion-ext-functions/src/spark_strings.rs | 2 +-
native-engine/datafusion-ext-plans/Cargo.toml | 3 +
.../src/agg/spark_udaf_wrapper.rs | 1 +
.../src/common/execution_context.rs | 2 +
.../src/common/row_null_checker.rs | 1 +
.../src/shuffle/buffered_data.rs | 1 +
56 files changed, 318 insertions(+), 215 deletions(-)
diff --git a/.github/workflows/tpcds-reusable.yml
b/.github/workflows/tpcds-reusable.yml
index 4eab0ac3..5110080d 100644
--- a/.github/workflows/tpcds-reusable.yml
+++ b/.github/workflows/tpcds-reusable.yml
@@ -120,9 +120,8 @@ jobs:
clippy
- name: Cargo clippy
- # First eliminate unwrap; then enable -D warnings to enforce all
default lints.
run: |
- cargo clippy --all-targets --workspace -- -A warnings -A
clippy::all -D clippy::unwrap_used
+ cargo clippy --all-targets --workspace -- -D warnings
- name: Cargo test
run: |
diff --git a/Cargo.toml b/Cargo.toml
index d5cd6648..692b9379 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -27,10 +27,95 @@ members = [
"native-engine/auron-memmgr",
]
+[workspace.lints.rust]
+# Pending processing (temporarily allow)
+unused_variables = "allow"
+dead_code = "allow"
+unused_imports = "allow"
+unused_must_use = "allow"
+deprecated = "allow"
+
[workspace.lints.clippy]
unwrap_used = "deny"
panic = "deny"
+# Pending processing (temporarily allow)
+# Unwrap/Error Handling
+unnecessary_unwrap = "allow"
+needless_question_mark = "allow"
+unnecessary_literal_unwrap = "allow"
+bind_instead_of_map = "allow"
+expect_fun_call = "allow"
+io_other_error = "allow"
+unnecessary_fallible_conversions = "allow"
+
+# Unsafe/Memory
+missing_transmute_annotations = "allow"
+declare_interior_mutable_const = "allow"
+borrow_interior_mutable_const = "allow"
+uninit_vec = "allow"
+macro_metavars_in_unsafe = "allow"
+ptr_arg = "allow"
+borrowed_box = "allow"
+
+# Iterator/Collection
+manual_flatten = "allow"
+iter_cloned_collect = "allow"
+into_iter_on_ref = "allow"
+box_collection = "allow"
+useless_vec = "allow"
+len_without_is_empty = "allow"
+len_zero = "allow"
+mem_replace_option_with_none = "allow"
+get_first = "allow"
+
+# Loop/Control Flow
+needless_range_loop = "allow"
+while_let_loop = "allow"
+while_let_on_iterator = "allow"
+explicit_counter_loop = "allow"
+
+# Format/String
+useless_format = "allow"
+uninlined_format_args = "allow"
+to_string_in_format_args = "allow"
+
+# Code Style/Redundancy (Part 1)
+needless_borrow = "allow"
+needless_return = "allow"
+redundant_closure = "allow"
+redundant_locals = "allow"
+redundant_pattern_matching = "allow"
+
+# Code Style/Redundancy (Part 2)
+unnecessary_cast = "allow"
+unnecessary_to_owned = "allow"
+useless_asref = "allow"
+clone_on_copy = "allow"
+unit_arg = "allow"
+manual_repeat_n = "allow"
+manual_div_ceil = "allow"
+
+# Condition/Logic
+collapsible_if = "allow"
+collapsible_else_if = "allow"
+if_same_then_else = "allow"
+match_like_matches_macro = "allow"
+explicit_auto_deref = "allow"
+bool_assert_comparison = "allow"
+
+# Naming/Structure/Remaining
+upper_case_acronyms = "allow"
+module_inception = "allow"
+too_many_arguments = "allow"
+should_implement_trait = "allow"
+extra_unused_lifetimes = "allow"
+crate_in_macro_def = "allow"
+int_plus_one = "allow"
+derived_hash_with_manual_eq = "allow"
+approx_constant = "allow"
+op_ref = "allow"
+
[profile.release]
opt-level = 3
lto = true
diff --git a/dev/mvn-build-helper/build-native.sh
b/dev/mvn-build-helper/build-native.sh
index 8cc62ad7..548ab9a9 100755
--- a/dev/mvn-build-helper/build-native.sh
+++ b/dev/mvn-build-helper/build-native.sh
@@ -94,8 +94,7 @@ if [ ! -f "$cache_libpath" ] || [ "$new_checksum" !=
"$old_checksum" ]; then
cargo fmt --all -q -- 2>&1
echo "Running cargo clippy..."
- # First eliminate unwrap; then enable -D warnings to enforce all default
lints.
- cargo clippy --all-targets --workspace -- -A warnings -A clippy::all -D
clippy::unwrap_used 2>&1
+ cargo clippy --all-targets --workspace -- -D warnings 2>&1
echo "Building native with [$profile] profile..."
cargo build --profile="$profile" $features_arg --verbose --locked --frozen
2>&1
diff --git a/native-engine/auron-jni-bridge/Cargo.toml
b/native-engine/auron-jni-bridge/Cargo.toml
index 679ed6fc..fbbbf4a2 100644
--- a/native-engine/auron-jni-bridge/Cargo.toml
+++ b/native-engine/auron-jni-bridge/Cargo.toml
@@ -21,6 +21,9 @@ version = "0.1.0"
edition = "2024"
resolver = "1"
+[lints]
+workspace = true
+
[dependencies]
datafusion = { workspace = true }
jni = { workspace = true }
diff --git a/native-engine/auron-memmgr/Cargo.toml
b/native-engine/auron-memmgr/Cargo.toml
index a5aff2ae..8b09480e 100644
--- a/native-engine/auron-memmgr/Cargo.toml
+++ b/native-engine/auron-memmgr/Cargo.toml
@@ -20,6 +20,9 @@ name = "auron-memmgr"
version = "0.1.0"
edition = "2024"
+[lints]
+workspace = true
+
[dependencies]
auron-jni-bridge = { workspace = true }
datafusion = { workspace = true }
diff --git a/native-engine/auron-memmgr/src/spill.rs
b/native-engine/auron-memmgr/src/spill.rs
index 3e7ef0b4..0ed19383 100644
--- a/native-engine/auron-memmgr/src/spill.rs
+++ b/native-engine/auron-memmgr/src/spill.rs
@@ -79,7 +79,7 @@ fn spill_compression_codec() -> &'static str {
if is_jni_bridge_inited() {
conf::SPILL_COMPRESSION_CODEC.value()
} else {
- Ok(format!("lz4")) // for testing
+ Ok("lz4".to_string()) // for testing
}
})
.expect("error reading spark.auron.spill.compression.codec")
@@ -168,10 +168,7 @@ impl Drop for FileSpill {
.add_duration(Duration::from_nanos(self.1.mem_spill_iotime.value()
as u64));
if let Some(file_path) = &self.2 {
if let Err(e) = fs::remove_file(file_path) {
- warn!(
- "Was unable to delete spill file: {}. error: {}",
- file_path, e
- );
+ warn!("Was unable to delete spill file: {file_path}. error:
{e}");
}
}
}
diff --git a/native-engine/auron-planner/Cargo.toml
b/native-engine/auron-planner/Cargo.toml
index 72d30be3..a052f429 100644
--- a/native-engine/auron-planner/Cargo.toml
+++ b/native-engine/auron-planner/Cargo.toml
@@ -20,6 +20,9 @@ name = "auron-planner"
version = "0.1.0"
edition = "2024"
+[lints]
+workspace = true
+
[features]
default = ["prost/no-recursion-limit"]
diff --git a/native-engine/auron-planner/build.rs
b/native-engine/auron-planner/build.rs
index afcf8515..8c1e6620 100644
--- a/native-engine/auron-planner/build.rs
+++ b/native-engine/auron-planner/build.rs
@@ -40,10 +40,10 @@ fn main() -> Result<(), String> {
}
}
if let Some(path) = protoc_file {
- eprintln!("Using protoc executable: {:?}", path);
+ eprintln!("Using protoc executable: {path:?}");
prost_build.protoc_executable(path);
}
prost_build
.compile_protos(&["proto/auron.proto"], &["proto"])
- .map_err(|e| format!("protobuf compilation failed: {}", e))
+ .map_err(|e| format!("protobuf compilation failed: {e}"))
}
diff --git a/native-engine/auron-planner/src/error.rs
b/native-engine/auron-planner/src/error.rs
index 22ab3ec3..62e46169 100644
--- a/native-engine/auron-planner/src/error.rs
+++ b/native-engine/auron-planner/src/error.rs
@@ -77,22 +77,22 @@ impl Display for PlanSerDeError {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
match self {
PlanSerDeError::NotImplemented(desc) => {
- write!(f, "Not implemented: {}", desc)
+ write!(f, "Not implemented: {desc}")
}
- PlanSerDeError::General(desc) => write!(f, "General error: {}",
desc),
- PlanSerDeError::ArrowError(desc) => write!(f, "Arrow error: {}",
desc),
+ PlanSerDeError::General(desc) => write!(f, "General error:
{desc}"),
+ PlanSerDeError::ArrowError(desc) => write!(f, "Arrow error:
{desc}"),
PlanSerDeError::DataFusionError(desc) => {
- write!(f, "DataFusion error: {:?}", desc)
+ write!(f, "DataFusion error: {desc:?}")
}
- PlanSerDeError::IoError(desc) => write!(f, "IO error: {}", desc),
+ PlanSerDeError::IoError(desc) => write!(f, "IO error: {desc}"),
PlanSerDeError::Internal(desc) => {
- write!(f, "Internal error: {}", desc)
+ write!(f, "Internal error: {desc}")
}
Self::MissingRequiredField(name) => {
- write!(f, "Missing required field {}", name)
+ write!(f, "Missing required field {name}")
}
Self::UnknownEnumVariant { name, value } => {
- write!(f, "Unknown i32 value for {} enum: {}", name, value)
+ write!(f, "Unknown i32 value for {name} enum: {value}")
}
}
}
diff --git a/native-engine/auron-planner/src/lib.rs
b/native-engine/auron-planner/src/lib.rs
index d546fea2..a0f7b83d 100644
--- a/native-engine/auron-planner/src/lib.rs
+++ b/native-engine/auron-planner/src/lib.rs
@@ -95,8 +95,7 @@ pub fn from_proto_binary_op(op: &str) -> Result<Operator,
PlanSerDeError> {
"RegexNotMatch" => Ok(Operator::RegexNotMatch),
"StringConcat" => Ok(Operator::StringConcat),
other => Err(proto_error(format!(
- "Unsupported binary operator '{:?}'",
- other
+ "Unsupported binary operator '{other:?}'"
))),
}
}
diff --git a/native-engine/auron-planner/src/planner.rs
b/native-engine/auron-planner/src/planner.rs
index f081e32c..8e13312b 100644
--- a/native-engine/auron-planner/src/planner.rs
+++ b/native-engine/auron-planner/src/planner.rs
@@ -108,6 +108,7 @@ pub struct PhysicalPlanner {
partition_id: usize,
}
+#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
impl PhysicalPlanner {
pub fn new(partition_id: usize) -> Self {
Self { partition_id }
@@ -119,8 +120,7 @@ impl PhysicalPlanner {
) -> Result<Arc<dyn ExecutionPlan>, PlanError> {
let plan = spark_plan.physical_plan_type.as_ref().ok_or_else(|| {
proto_error(format!(
- "physical_plan::from_proto() Unsupported physical plan '{:?}'",
- spark_plan
+ "physical_plan::from_proto() Unsupported physical plan
'{spark_plan:?}'"
))
})?;
match plan {
@@ -212,13 +212,13 @@ impl PhysicalPlanner {
.iter()
.map(|col| {
let left_key = self.try_parse_physical_expr(
- &col.left
+ col.left
.as_ref()
.expect("hash join: left join key must be
present"),
&left.schema(),
)?;
let right_key = self.try_parse_physical_expr(
- &col.right
+ col.right
.as_ref()
.expect("hash join: right join key must be
present"),
&right.schema(),
@@ -260,13 +260,13 @@ impl PhysicalPlanner {
.iter()
.map(|col| {
let left_key = self.try_parse_physical_expr(
- &col.left
+ col.left
.as_ref()
.expect("sort-merge join: left join key must
be present"),
&left.schema(),
)?;
let right_key = self.try_parse_physical_expr(
- &col.right
+ col.right
.as_ref()
.expect("sort-merge join: right join key must
be present"),
&right.schema(),
@@ -354,7 +354,7 @@ impl PhysicalPlanner {
let exprs = self
.try_parse_physical_sort_expr(&input, sort)
.unwrap_or_else(|e| {
- panic!("Failed to parse physical sort expressions:
{}", e);
+ panic!("Failed to parse physical sort expressions:
{e}");
});
// always preserve partitioning
@@ -384,13 +384,13 @@ impl PhysicalPlanner {
.iter()
.map(|col| {
let left_key = self.try_parse_physical_expr(
- &col.left
+ col.left
.as_ref()
.expect("broadcast join: left join key must be
present"),
&left.schema(),
)?;
let right_key = self.try_parse_physical_expr(
- &col.right
+ col.right
.as_ref()
.expect("broadcast join: right join key must
be present"),
&right.schema(),
@@ -491,11 +491,9 @@ impl PhysicalPlanner {
.zip(agg.grouping_expr_name.iter())
.map(|(expr, name)| {
self.try_parse_physical_expr(expr, &input_schema)
- .and_then(|expr| {
- Ok(GroupingExpr {
- expr,
- field_name: name.to_owned(),
- })
+ .map(|expr| GroupingExpr {
+ expr,
+ field_name: name.to_owned(),
})
})
.collect::<Result<Vec<_>, _>>()?;
@@ -603,8 +601,7 @@ impl PhysicalPlanner {
.as_ref()
.ok_or_else(|| {
proto_error(format!(
- "physical_plan::from_proto()
Unexpected sort expr {:?}",
- spark_plan
+ "physical_plan::from_proto()
Unexpected sort expr {spark_plan:?}"
))
})?
.try_into()?,
@@ -693,8 +690,7 @@ impl PhysicalPlanner {
.as_ref()
.ok_or_else(|| {
proto_error(format!(
- "physical_plan::from_proto()
Unexpected sort expr {:?}",
- spark_plan
+ "physical_plan::from_proto()
Unexpected sort expr {spark_plan:?}"
))
})?
.as_ref();
@@ -707,8 +703,7 @@ impl PhysicalPlanner {
})
} else {
Err(PlanSerDeError::General(format!(
- "physical_plan::from_proto() {:?}",
- spark_plan
+ "physical_plan::from_proto() {spark_plan:?}"
)))
}
})
@@ -857,7 +852,7 @@ impl PhysicalPlanner {
Ok(e)
})
.collect::<Result<Vec<_>, _>>()?;
- in_list(expr, list_exprs, &e.negated, &input_schema)?
+ in_list(expr, list_exprs, &e.negated, input_schema)?
}
ExprType::Case(e) => Arc::new(CaseExpr::try_new(
e.expr
@@ -904,7 +899,7 @@ impl PhysicalPlanner {
self.partition_id,
)?;
Arc::new(create_udf(
- &format!("spark_ext_function_{}", fun_name),
+ &format!("spark_ext_function_{fun_name}"),
args.iter()
.map(|e| e.data_type(input_schema))
.collect::<Result<Vec<_>, _>>()?,
@@ -1039,8 +1034,7 @@ impl PhysicalPlanner {
.map(|expr| {
let expr = expr.expr_type.as_ref().ok_or_else(|| {
proto_error(format!(
- "physical_plan::from_proto() Unexpected expr {:?}",
- input
+ "physical_plan::from_proto() Unexpected expr {input:?}"
))
})?;
if let ExprType::Sort(sort_expr) = expr {
@@ -1049,8 +1043,7 @@ impl PhysicalPlanner {
.as_ref()
.ok_or_else(|| {
proto_error(format!(
- "physical_plan::from_proto() Unexpected sort
expr {:?}",
- input
+ "physical_plan::from_proto() Unexpected sort
expr {input:?}"
))
})?
.as_ref();
@@ -1063,8 +1056,7 @@ impl PhysicalPlanner {
})
} else {
Err(PlanSerDeError::General(format!(
- "physical_plan::from_proto() {:?}",
- input
+ "physical_plan::from_proto() {input:?}"
)))
}
})
@@ -1080,8 +1072,7 @@ impl PhysicalPlanner {
partitioning.map_or(Ok(None), |p| {
let plan = p.repartition_type.as_ref().ok_or_else(|| {
proto_error(format!(
- "partition::from_proto() Unsupported partition '{:?}'",
- p
+ "partition::from_proto() Unsupported partition '{p:?}'"
))
})?;
match plan {
@@ -1124,7 +1115,7 @@ impl PhysicalPlanner {
let exprs = self
.try_parse_physical_sort_expr(&input, &sort)
.unwrap_or_else(|e| {
- panic!("Failed to parse physical sort
expressions: {}", e);
+ panic!("Failed to parse physical sort
expressions: {e}");
});
let value_list: Vec<ScalarValue> = range_part
@@ -1149,7 +1140,7 @@ impl PhysicalPlanner {
.iter()
.map(|x| {
if let ScalarValue::List(single) = x {
- return single.value(0);
+ single.value(0)
} else {
unreachable!("expect list scalar value");
}
diff --git a/native-engine/auron/Cargo.toml b/native-engine/auron/Cargo.toml
index d269374d..9111318d 100644
--- a/native-engine/auron/Cargo.toml
+++ b/native-engine/auron/Cargo.toml
@@ -21,6 +21,9 @@ version = "0.1.0"
edition = "2024"
resolver = "1"
+[lints]
+workspace = true
+
[lib]
crate-type = ["cdylib"]
diff --git a/native-engine/auron/src/alloc.rs b/native-engine/auron/src/alloc.rs
index 9d976721..aa2db221 100644
--- a/native-engine/auron/src/alloc.rs
+++ b/native-engine/auron/src/alloc.rs
@@ -61,7 +61,7 @@ impl<T: GlobalAlloc> DebugAlloc<T> {
let last_updated = self.last_updated.load(SeqCst);
let delta = (current as isize - last_updated as isize).abs();
if delta > 104857600 {
- eprintln!(" * ALLOC {} -> {}", last_updated, current);
+ eprintln!(" * ALLOC {last_updated} -> {current}");
self.last_updated.store(current, SeqCst);
}
}
diff --git a/native-engine/auron/src/exec.rs b/native-engine/auron/src/exec.rs
index af0db9cf..fa4fec4a 100644
--- a/native-engine/auron/src/exec.rs
+++ b/native-engine/auron/src/exec.rs
@@ -64,9 +64,9 @@ pub extern "system" fn
Java_org_apache_auron_jni_JniBridge_callNative(
eprintln!("------ initializing auron native environment ------");
let log_level = env
.get_string(log_level)
- .map(|s| String::from(s))
+ .map(String::from)
.expect("init: failed to read log_level from env");
- eprintln!("initializing logging with level: {}", log_level);
+ eprintln!("initializing logging with level: {log_level}");
init_logging(log_level.as_str());
// init jni java classes
diff --git a/native-engine/auron/src/logging.rs
b/native-engine/auron/src/logging.rs
index 9fbc7f96..68c20d6b 100644
--- a/native-engine/auron/src/logging.rs
+++ b/native-engine/auron/src/logging.rs
@@ -20,15 +20,15 @@ use log::{Level, LevelFilter, Log, Metadata, Record};
use once_cell::sync::OnceCell;
thread_local! {
- pub static THREAD_TID: Cell<usize> = Cell::new(0);
- pub static THREAD_STAGE_ID: Cell<usize> = Cell::new(0);
- pub static THREAD_PARTITION_ID: Cell<usize> = Cell::new(0);
+ pub static THREAD_TID: Cell<usize> = const { Cell::new(0) };
+ pub static THREAD_STAGE_ID: Cell<usize> = const { Cell::new(0) };
+ pub static THREAD_PARTITION_ID: Cell<usize> = const { Cell::new(0) };
}
const DEFAULT_MAX_LEVEL: Level = Level::Info;
pub fn init_logging(level: &str) {
- let log_level = Level::from_str(level).unwrap_or_else(|_|
DEFAULT_MAX_LEVEL);
+ let log_level = Level::from_str(level).unwrap_or(DEFAULT_MAX_LEVEL);
static LOGGER: OnceCell<SimpleLogger> = OnceCell::new();
let logger = LOGGER.get_or_init(|| SimpleLogger {
start_instant: Instant::now(),
diff --git a/native-engine/auron/src/rt.rs b/native-engine/auron/src/rt.rs
index a8913b40..37f40daf 100644
--- a/native-engine/auron/src/rt.rs
+++ b/native-engine/auron/src/rt.rs
@@ -248,14 +248,14 @@ impl NativeExecutionRuntime {
};
match next_batch() {
- Ok(ret) => return ret,
+ Ok(ret) => ret,
Err(err) => {
let _ = set_error(
&self.native_wrapper,
&format!("poll record batch error: {err}"),
None,
);
- return false;
+ false
}
}
}
diff --git a/native-engine/datafusion-ext-commons/Cargo.toml
b/native-engine/datafusion-ext-commons/Cargo.toml
index 7ca7fe6a..3bf788da 100644
--- a/native-engine/datafusion-ext-commons/Cargo.toml
+++ b/native-engine/datafusion-ext-commons/Cargo.toml
@@ -21,6 +21,9 @@ version = "0.1.0"
edition = "2024"
resolver = "1"
+[lints]
+workspace = true
+
[features]
default = ["tokio/rt-multi-thread"]
diff --git a/native-engine/datafusion-ext-commons/src/arrow/cast.rs
b/native-engine/datafusion-ext-commons/src/arrow/cast.rs
index ee536c04..4ca3f87f 100644
--- a/native-engine/datafusion-ext-commons/src/arrow/cast.rs
+++ b/native-engine/datafusion-ext-commons/src/arrow/cast.rs
@@ -162,7 +162,7 @@ pub fn cast_impl(
// correct orc map entries field name from "keys" to
"key", "values" to
// "value"
if col.is_none() && (origin.eq("key") ||
origin.eq("value")) {
- let adjust = format!("{}s", origin);
+ let adjust = format!("{origin}s");
col = struct_.column_by_name(adjust.as_str());
}
if col.is_some() {
@@ -383,7 +383,7 @@ fn try_cast_string_array_to_date(array: &dyn Array) ->
Result<ArrayRef> {
let strings = array.as_string::<i32>();
let mut converted_values = Vec::with_capacity(strings.len());
for s in strings {
- converted_values.push(s.and_then(|s| to_date(s)));
+ converted_values.push(s.and_then(to_date));
}
Ok(Arc::new(Date32Array::from(converted_values)))
}
@@ -502,7 +502,7 @@ fn to_date(s: &str) -> Option<i32> {
i += 1;
} else {
let parsed_value = (b - b'0') as i32;
- if parsed_value < 0 || parsed_value > 9 {
+ if !(0..=9).contains(&parsed_value) {
return None;
} else {
current_segment_value = current_segment_value * 10 +
parsed_value;
diff --git a/native-engine/datafusion-ext-commons/src/arrow/coalesce.rs
b/native-engine/datafusion-ext-commons/src/arrow/coalesce.rs
index b69dd61e..4c24bf16 100644
--- a/native-engine/datafusion-ext-commons/src/arrow/coalesce.rs
+++ b/native-engine/datafusion-ext-commons/src/arrow/coalesce.rs
@@ -217,17 +217,17 @@ mod tests {
#[test]
fn test_coalesce_string_with_offsets() -> Result<()> {
let array: ArrayRef = Arc::new(StringArray::from(vec![
- Some(format!("000")),
- Some(format!("111")),
- Some(format!("222")),
- Some(format!("333")),
- Some(format!("444")),
+ Some("000".to_string()),
+ Some("111".to_string()),
+ Some("222".to_string()),
+ Some("333".to_string()),
+ Some("444".to_string()),
None,
- Some(format!("666666")),
- Some(format!("777")),
- Some(format!("888")),
- Some(format!("999")),
- Some(format!("101010")),
+ Some("666666".to_string()),
+ Some("777".to_string()),
+ Some("888".to_string()),
+ Some("999".to_string()),
+ Some("101010".to_string()),
]));
let test = vec![array.slice(0, 6), array.slice(2, 6), array.slice(4,
6)];
let coalesced = coalesce_arrays_unchecked(&DataType::Utf8, &test);
diff --git a/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
b/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
index 1cccb9e7..1df97694 100644
--- a/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
+++ b/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
@@ -327,7 +327,7 @@ fn eq_struct(
}
return false;
}
- return true;
+ true
});
Ok(f)
}
@@ -402,7 +402,7 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(0, 1));
+ assert!(!eq(0, 1));
Ok(())
}
@@ -415,7 +415,7 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
+ assert!(!eq(0, 0));
Ok(())
}
@@ -425,7 +425,7 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, (eq)(0, 1));
+ assert!(!(eq)(0, 1));
Ok(())
}
@@ -436,7 +436,7 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
+ assert!(!eq(0, 0));
Ok(())
}
@@ -446,7 +446,7 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(0, 1));
+ assert!(!eq(0, 1));
Ok(())
}
@@ -456,9 +456,9 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(true, eq(0, 0));
- assert_eq!(false, eq(0, 1));
- assert_eq!(false, eq(1, 1)); // NaN != NaN
+ assert!(eq(0, 0));
+ assert!(!eq(0, 1));
+ assert!(!eq(1, 1)); // NaN != NaN
Ok(())
}
@@ -468,8 +468,8 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(true, eq(0, 1)); // -0.0 == 0.0
- assert_eq!(true, eq(1, 0));
+ assert!(eq(0, 1)); // -0.0 == 0.0
+ assert!(eq(1, 0));
Ok(())
}
@@ -486,14 +486,14 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(0, 1));
- assert_eq!(false, eq(1, 0));
+ assert!(!eq(0, 1));
+ assert!(!eq(1, 0));
// somewhat confusingly, while 90M milliseconds is more than 1 day,
// it will compare less as the comparison is done on the underlying
// values not field by field
- assert_eq!(false, eq(1, 2));
- assert_eq!(false, eq(2, 1));
+ assert!(!eq(1, 2));
+ assert!(!eq(2, 1));
Ok(())
}
@@ -510,12 +510,12 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(0, 1));
- assert_eq!(false, eq(1, 0));
+ assert!(!eq(0, 1));
+ assert!(!eq(1, 0));
// the underlying representation is months, so both quantities are the
same
- assert_eq!(true, eq(1, 2));
- assert_eq!(true, eq(2, 1));
+ assert!(eq(1, 2));
+ assert!(eq(2, 1));
Ok(())
}
@@ -532,14 +532,14 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(0, 1));
- assert_eq!(false, eq(1, 0));
+ assert!(!eq(0, 1));
+ assert!(!eq(1, 0));
// somewhat confusingly, while 100 days is more than 1 month in all
cases
// it will compare less as the comparison is done on the underlying
// values not field by field
- assert_eq!(false, eq(1, 2));
- assert_eq!(false, eq(2, 1));
+ assert!(!eq(1, 2));
+ assert!(!eq(2, 1));
Ok(())
}
@@ -551,8 +551,8 @@ pub mod tests {
.with_precision_and_scale(23, 6)?;
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(1, 0));
- assert_eq!(false, eq(0, 2));
+ assert!(!eq(1, 0));
+ assert!(!eq(0, 2));
Ok(())
}
@@ -568,8 +568,8 @@ pub mod tests {
.with_precision_and_scale(53, 6)?;
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(1, 0));
- assert_eq!(false, eq(0, 2));
+ assert!(!eq(1, 0));
+ assert!(!eq(0, 2));
Ok(())
}
@@ -580,9 +580,9 @@ pub mod tests {
let eq = make_eq_comparator(&array, &array, false)?;
- assert_eq!(false, eq(0, 1));
- assert_eq!(true, eq(3, 4));
- assert_eq!(false, eq(2, 3));
+ assert!(!eq(0, 1));
+ assert!(eq(3, 4));
+ assert!(!eq(2, 3));
Ok(())
}
@@ -595,9 +595,9 @@ pub mod tests {
let eq = make_eq_comparator(&a1, &a2, false)?;
- assert_eq!(false, eq(0, 0));
- assert_eq!(true, eq(0, 3));
- assert_eq!(false, eq(1, 3));
+ assert!(!eq(0, 0));
+ assert!(eq(0, 3));
+ assert!(!eq(1, 3));
Ok(())
}
@@ -613,11 +613,11 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
- assert_eq!(false, eq(0, 3));
- assert_eq!(true, eq(3, 3));
- assert_eq!(false, eq(3, 1));
- assert_eq!(false, eq(3, 2));
+ assert!(!eq(0, 0));
+ assert!(!eq(0, 3));
+ assert!(eq(3, 3));
+ assert!(!eq(3, 1));
+ assert!(!eq(3, 2));
Ok(())
}
@@ -633,11 +633,11 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
- assert_eq!(false, eq(0, 3));
- assert_eq!(true, eq(3, 3));
- assert_eq!(false, eq(3, 1));
- assert_eq!(false, eq(3, 2));
+ assert!(!eq(0, 0));
+ assert!(!eq(0, 3));
+ assert!(eq(3, 3));
+ assert!(!eq(3, 1));
+ assert!(!eq(3, 2));
Ok(())
}
@@ -653,11 +653,11 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
- assert_eq!(false, eq(0, 3));
- assert_eq!(true, eq(3, 3));
- assert_eq!(false, eq(3, 1));
- assert_eq!(false, eq(3, 2));
+ assert!(!eq(0, 0));
+ assert!(!eq(0, 3));
+ assert!(eq(3, 3));
+ assert!(!eq(3, 1));
+ assert!(!eq(3, 2));
Ok(())
}
@@ -673,11 +673,11 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
- assert_eq!(false, eq(0, 3));
- assert_eq!(true, eq(3, 3));
- assert_eq!(false, eq(3, 1));
- assert_eq!(false, eq(3, 2));
+ assert!(!eq(0, 0));
+ assert!(!eq(0, 3));
+ assert!(eq(3, 3));
+ assert!(!eq(3, 1));
+ assert!(!eq(3, 2));
Ok(())
}
@@ -693,11 +693,11 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
- assert_eq!(false, eq(0, 3));
- assert_eq!(true, eq(3, 3));
- assert_eq!(false, eq(3, 1));
- assert_eq!(false, eq(3, 2));
+ assert!(!eq(0, 0));
+ assert!(!eq(0, 3));
+ assert!(eq(3, 3));
+ assert!(!eq(3, 1));
+ assert!(!eq(3, 2));
Ok(())
}
@@ -723,11 +723,11 @@ pub mod tests {
let eq = make_eq_comparator(&array1, &array2, false)?;
- assert_eq!(false, eq(0, 0));
- assert_eq!(false, eq(0, 3));
- assert_eq!(true, eq(3, 3));
- assert_eq!(false, eq(3, 1));
- assert_eq!(false, eq(3, 2));
+ assert!(!eq(0, 0));
+ assert!(!eq(0, 3));
+ assert!(eq(3, 3));
+ assert!(!eq(3, 1));
+ assert!(!eq(3, 2));
Ok(())
}
@@ -736,9 +736,9 @@ pub mod tests {
let a = GenericByteArray::<T>::new(offsets, b"abcdefa".into(), None);
let eq = make_eq_comparator(&a, &a, false)?;
- assert_eq!(false, eq(0, 1));
- assert_eq!(false, eq(0, 2));
- assert_eq!(true, eq(1, 1));
+ assert!(!eq(0, 1));
+ assert!(!eq(0, 2));
+ assert!(eq(1, 1));
Ok(())
}
@@ -781,13 +781,13 @@ pub mod tests {
let b = b.finish();
let eq = make_eq_comparator(&a, &b, false)?;
- assert_eq!(eq(0, 0), false); // lists contains null never equal
- assert_eq!(eq(0, 1), false);
- assert_eq!(eq(0, 2), false);
- assert_eq!(eq(1, 2), false);
- assert_eq!(eq(1, 3), false);
- assert_eq!(eq(2, 0), false);
- assert_eq!(eq(4, 4), true);
+ assert!(!eq(0, 0)); // lists contains null never equal
+ assert!(!eq(0, 1));
+ assert!(!eq(0, 2));
+ assert!(!eq(1, 2));
+ assert!(!eq(1, 3));
+ assert!(!eq(2, 0));
+ assert!(eq(4, 4));
Ok(())
}
@@ -816,13 +816,13 @@ pub mod tests {
let s2 = StructArray::new(fields.clone(), values, None);
let eq = make_eq_comparator(&s1, &s2, false)?;
- assert_eq!(eq(0, 1), false); // (1, [1, 2]) eq (2, None)
- assert_eq!(eq(0, 0), false); // (1, [1, 2]) eq (None, None)
- assert_eq!(eq(1, 1), false); // (2, [None]) eq (2, None)
- assert_eq!(eq(2, 2), false); // (None, None) eq (None, [])
- assert_eq!(eq(3, 0), false); // None eq (None, [])
- assert_eq!(eq(2, 0), false); // (None, None) eq (None, None)
- assert_eq!(eq(3, 0), false); // None eq (None, None)
+ assert!(!eq(0, 1)); // (1, [1, 2]) eq (2, None)
+ assert!(!eq(0, 0)); // (1, [1, 2]) eq (None, None)
+ assert!(!eq(1, 1)); // (2, [None]) eq (2, None)
+ assert!(!eq(2, 2)); // (None, None) eq (None, [])
+ assert!(!eq(3, 0)); // None eq (None, [])
+ assert!(!eq(2, 0)); // (None, None) eq (None, None)
+ assert!(!eq(3, 0)); // None eq (None, None)
Ok(())
}
}
diff --git a/native-engine/datafusion-ext-commons/src/arrow/selection.rs
b/native-engine/datafusion-ext-commons/src/arrow/selection.rs
index bc15e87d..9cd08a9e 100644
--- a/native-engine/datafusion-ext-commons/src/arrow/selection.rs
+++ b/native-engine/datafusion-ext-commons/src/arrow/selection.rs
@@ -53,7 +53,7 @@ fn take_cols_internal<T: ArrowPrimitiveType>(
cols: &[ArrayRef],
indices: &PrimitiveArray<T>,
) -> Result<Vec<ArrayRef>> {
- cols.into_iter()
+ cols.iter()
.map(|c| Ok(arrow::compute::take(&c, indices, None)?))
.collect::<Result<_>>()
}
@@ -108,7 +108,7 @@ pub fn create_array_interleaver(
}
fn nulls(&self, indices: &[(usize, usize)]) -> Option<NullBuffer> {
- let nulls = match self.has_nulls {
+ match self.has_nulls {
true => {
let mut builder = BooleanBufferBuilder::new(indices.len());
for (a, b) in indices {
@@ -118,8 +118,7 @@ pub fn create_array_interleaver(
Some(NullBuffer::new(builder.finish()))
}
false => None,
- };
- nulls
+ }
}
}
@@ -139,10 +138,11 @@ pub fn create_array_interleaver(
            let (prefetch_array_idx, prefetch_value_idx) = indices[i + PREFETCH_AHEAD];
prefetch_read_data!({
let array =
interleaver.arrays.get_unchecked(prefetch_array_idx);
-                    let ptr = array
+
+                    (array
                         .values()
-                        .get_unchecked(array.offset() + prefetch_value_idx);
-                    ptr
+                        .get_unchecked(array.offset() + prefetch_value_idx))
+                        as _
});
}
}
diff --git a/native-engine/datafusion-ext-commons/src/hadoop_fs.rs
b/native-engine/datafusion-ext-commons/src/hadoop_fs.rs
index 4dca7bb9..0f50d5fd 100644
--- a/native-engine/datafusion-ext-commons/src/hadoop_fs.rs
+++ b/native-engine/datafusion-ext-commons/src/hadoop_fs.rs
@@ -99,7 +99,7 @@ impl Drop for FsDataInputWrapper {
fn drop(&mut self) {
let _timer = self.io_time.timer();
         if let Err(e) = jni_call!(JavaAutoCloseable(self.obj.as_obj()).close() -> ()) {
- log::warn!("error closing hadoop FSDataInputStream: {:?}", e);
+ log::warn!("error closing hadoop FSDataInputStream: {e:?}");
}
}
}
diff --git a/native-engine/datafusion-ext-commons/src/hash/xxhash.rs
b/native-engine/datafusion-ext-commons/src/hash/xxhash.rs
index a0fa967f..31bbab07 100644
--- a/native-engine/datafusion-ext-commons/src/hash/xxhash.rs
+++ b/native-engine/datafusion-ext-commons/src/hash/xxhash.rs
@@ -35,7 +35,7 @@ fn xxhash64(input: &[u8], seed: u64) -> u64 {
if remaining >= 32 {
let mut acc1 = seed + PRIME64_1 + PRIME64_2;
let mut acc2 = seed + PRIME64_2;
- let mut acc3 = seed + 0;
+ let mut acc3 = seed;
let mut acc4 = seed - PRIME64_1;
while remaining >= 32 {
diff --git a/native-engine/datafusion-ext-commons/src/io/batch_serde.rs
b/native-engine/datafusion-ext-commons/src/io/batch_serde.rs
index bbed9b36..2972e11b 100644
--- a/native-engine/datafusion-ext-commons/src/io/batch_serde.rs
+++ b/native-engine/datafusion-ext-commons/src/io/batch_serde.rs
@@ -95,7 +95,7 @@ pub fn read_batch(
let cols = schema
.fields()
.into_iter()
-        .map(|field| read_array(&mut input, &field.data_type(), num_rows, &mut transpose_opt))
+        .map(|field| read_array(&mut input, field.data_type(), num_rows, &mut transpose_opt))
.collect::<Result<_>>()?;
Ok(Some((num_rows, cols)))
}
@@ -196,7 +196,7 @@ fn write_bits_buffer<W: Write>(
bits_len: usize,
output: &mut W,
) -> Result<()> {
- let mut out_buffer = vec![0u8; (bits_len + 7) / 8];
+ let mut out_buffer = vec![0u8; bits_len.div_ceil(8)];
let in_ptr = buffer.as_ptr();
let out_ptr = out_buffer.as_mut_ptr();
@@ -212,7 +212,7 @@ fn write_bits_buffer<W: Write>(
}
fn read_bits_buffer<R: Read>(input: &mut R, bits_len: usize) -> Result<Buffer> {
- let buf = read_bytes_slice(input, (bits_len + 7) / 8)?;
+ let buf = read_bytes_slice(input, bits_len.div_ceil(8))?;
Ok(Buffer::from_vec(buf.into()))
}
@@ -741,7 +741,7 @@ mod test {
])?;
assert_batches_eq!(
- vec![
+ [
"+-----------+-----------+",
"| list1 | list2 |",
"+-----------+-----------+",
@@ -749,7 +749,7 @@ mod test {
"| | |",
"| [3, , 5] | [3, , 5] |",
"| [6, 7] | [6, 7] |",
- "+-----------+-----------+",
+ "+-----------+-----------+"
],
&[batch.clone()]
);
@@ -761,7 +761,7 @@ mod test {
        let (decoded_num_rows, decoded_cols) =
            read_batch(&mut cursor, &batch.schema())?.expect("non-empty batch");
assert_batches_eq!(
- vec![
+ [
"+-----------+-----------+",
"| list1 | list2 |",
"+-----------+-----------+",
@@ -769,7 +769,7 @@ mod test {
"| | |",
"| [3, , 5] | [3, , 5] |",
"| [6, 7] | [6, 7] |",
- "+-----------+-----------+",
+ "+-----------+-----------+"
],
&[recover_named_batch(
decoded_num_rows,
@@ -786,13 +786,13 @@ mod test {
        let (decoded_num_rows, decoded_cols) =
            read_batch(&mut cursor, &batch.schema())?.expect("non-empty batch");
assert_batches_eq!(
- vec![
+ [
"+----------+----------+",
"| list1 | list2 |",
"+----------+----------+",
"| | |",
"| [3, , 5] | [3, , 5] |",
- "+----------+----------+",
+ "+----------+----------+"
],
&[recover_named_batch(
decoded_num_rows,
diff --git a/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
b/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
index cb2e51e6..c774c745 100644
--- a/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
+++ b/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
@@ -272,7 +272,7 @@ fn io_compression_codec() -> &'static str {
if is_jni_bridge_inited() {
conf::SPARK_IO_COMPRESSION_CODEC.value()
} else {
- Ok(format!("lz4")) // for testing
+ Ok("lz4".to_string()) // for testing
}
})
.expect("error reading spark.io.compression.codec")
diff --git a/native-engine/datafusion-ext-commons/src/io/mod.rs
b/native-engine/datafusion-ext-commons/src/io/mod.rs
index a8160174..fd3276bf 100644
--- a/native-engine/datafusion-ext-commons/src/io/mod.rs
+++ b/native-engine/datafusion-ext-commons/src/io/mod.rs
@@ -49,7 +49,7 @@ pub fn recover_named_batch(
let cols = cols
.iter()
.zip(schema.fields())
- .map(|(col, field)| Ok(cast(&col, field.data_type())?))
+ .map(|(col, field)| cast(&col, field.data_type()))
.collect::<Result<Vec<_>>>()?;
Ok(RecordBatch::try_new_with_options(
schema,
diff --git a/native-engine/datafusion-ext-commons/src/io/scalar_serde.rs
b/native-engine/datafusion-ext-commons/src/io/scalar_serde.rs
index 90d6ec98..5378145a 100644
--- a/native-engine/datafusion-ext-commons/src/io/scalar_serde.rs
+++ b/native-engine/datafusion-ext-commons/src/io/scalar_serde.rs
@@ -69,7 +69,7 @@ pub fn write_scalar<W: Write>(value: &ScalarValue, nullable:
bool, output: &mut
ScalarValue::TimestampNanosecond(v, ..) => write_prim!(v),
ScalarValue::Utf8(v) => {
if let Some(v) = v {
- write_len(v.as_bytes().len() + 1, output)?;
+ write_len(v.len() + 1, output)?;
output.write_all(v.as_bytes())?;
} else {
write_len(0, output)?;
diff --git a/native-engine/datafusion-ext-commons/src/lib.rs
b/native-engine/datafusion-ext-commons/src/lib.rs
index bb897ca5..8491c379 100644
--- a/native-engine/datafusion-ext-commons/src/lib.rs
+++ b/native-engine/datafusion-ext-commons/src/lib.rs
@@ -211,7 +211,7 @@ impl<T: Sized + Copy> SliceAsRawBytes for [T] {
let bytes_ptr = self.as_ptr() as *const u8;
unsafe {
// safety: access raw bytes
- std::slice::from_raw_parts(bytes_ptr, size_of::<T>() * self.len())
+ std::slice::from_raw_parts(bytes_ptr, std::mem::size_of_val(self))
}
}
@@ -219,7 +219,7 @@ impl<T: Sized + Copy> SliceAsRawBytes for [T] {
let bytes_ptr = self.as_mut_ptr() as *mut u8;
unsafe {
// safety: access raw bytes
-            std::slice::from_raw_parts_mut(bytes_ptr, size_of::<T>() * self.len())
+            std::slice::from_raw_parts_mut(bytes_ptr, std::mem::size_of_val(self))
}
}
}
diff --git a/native-engine/datafusion-ext-commons/src/spark_bit_array.rs
b/native-engine/datafusion-ext-commons/src/spark_bit_array.rs
index ae365720..fb7f2d81 100644
--- a/native-engine/datafusion-ext-commons/src/spark_bit_array.rs
+++ b/native-engine/datafusion-ext-commons/src/spark_bit_array.rs
@@ -40,7 +40,7 @@ impl SparkBitArray {
num_bits <= i32::MAX as usize,
"cannot allocate enough space for {num_bits} bits"
);
- let data_len = (num_bits + 63) / 64;
+ let data_len = num_bits.div_ceil(64);
Self::new(vec![0; data_len])
}
diff --git a/native-engine/datafusion-ext-commons/src/spark_hash.rs
b/native-engine/datafusion-ext-commons/src/spark_hash.rs
index 66be9848..097ae2b2 100644
--- a/native-engine/datafusion-ext-commons/src/spark_hash.rs
+++ b/native-engine/datafusion-ext-commons/src/spark_hash.rs
@@ -56,6 +56,7 @@ pub fn create_hashes<T: num::PrimInt>(
hash_buffer
}
+#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
#[inline]
fn hash_array<T: num::PrimInt>(
array: &ArrayRef,
@@ -245,6 +246,7 @@ fn create_hashes_dictionary<K: ArrowDictionaryKeyType, T:
num::PrimInt>(
}
}
+#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
fn hash_one<T: num::PrimInt>(
col: &ArrayRef,
idx: usize,
@@ -404,7 +406,7 @@ mod tests {
#[test]
fn test_list() {
- let mut hashes_buffer = vec![42; 4];
+ let mut hashes_buffer = [42; 4];
for hash in hashes_buffer.iter_mut() {
*hash = spark_compatible_murmur3_hash(5_i32.to_le_bytes(), *hash);
}
@@ -523,7 +525,7 @@ mod tests {
let value_data = ArrayData::builder(DataType::Int32)
.len(6)
.add_buffer(Buffer::from_slice_ref(
- &[1i32, 2, 3, 4, 5, 6].to_byte_slice(),
+ [1i32, 2, 3, 4, 5, 6].to_byte_slice(),
))
.build()?;
@@ -531,7 +533,7 @@ mod tests {
let list_data_type = DataType::new_list(DataType::Int32, false);
let list_data = ArrayData::builder(list_data_type)
.len(3)
-            .add_buffer(Buffer::from_slice_ref(&[0i32, 2, 5, 6].to_byte_slice()))
+            .add_buffer(Buffer::from_slice_ref([0i32, 2, 5, 6].to_byte_slice()))
.add_child_data(value_data)
.build()?;
@@ -550,20 +552,20 @@ mod tests {
let key_data = ArrayData::builder(DataType::Int32)
.len(8)
.add_buffer(Buffer::from_slice_ref(
- &[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice(),
+ [0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice(),
))
.build()?;
let value_data = ArrayData::builder(DataType::UInt32)
.len(8)
.add_buffer(Buffer::from_slice_ref(
- &[0u32, 10, 20, 0, 40, 0, 60, 70].to_byte_slice(),
+ [0u32, 10, 20, 0, 40, 0, 60, 70].to_byte_slice(),
))
.null_bit_buffer(Some(Buffer::from(&[0b11010110])))
.build()?;
// Construct a buffer for value offsets, for the nested array:
// [[0, 1, 2], [3, 4, 5], [6, 7]]
- let entry_offsets = Buffer::from_slice_ref(&[0, 3, 6,
8].to_byte_slice());
+ let entry_offsets = Buffer::from_slice_ref([0, 3, 6,
8].to_byte_slice());
let keys_field = Arc::new(Field::new("keys", DataType::Int32, false));
let values_field = Arc::new(Field::new("values", DataType::UInt32,
true));
diff --git a/native-engine/datafusion-ext-exprs/Cargo.toml
b/native-engine/datafusion-ext-exprs/Cargo.toml
index 4387bbdf..961c54b7 100644
--- a/native-engine/datafusion-ext-exprs/Cargo.toml
+++ b/native-engine/datafusion-ext-exprs/Cargo.toml
@@ -21,6 +21,9 @@ version = "0.1.0"
edition = "2024"
resolver = "1"
+[lints]
+workspace = true
+
[dependencies]
arrow = { workspace = true }
auron-jni-bridge = { workspace = true }
diff --git
a/native-engine/datafusion-ext-exprs/src/bloom_filter_might_contain.rs
b/native-engine/datafusion-ext-exprs/src/bloom_filter_might_contain.rs
index 663de05a..b0e7ad5a 100644
--- a/native-engine/datafusion-ext-exprs/src/bloom_filter_might_contain.rs
+++ b/native-engine/datafusion-ext-exprs/src/bloom_filter_might_contain.rs
@@ -198,7 +198,7 @@ fn get_cached_bloom_filter(
) -> Result<Arc<Option<SparkBloomFilter>>> {
// remove expire keys and insert new key
let slot = {
- let cached_bloom_filter = CACHED_BLOOM_FILTER.get_or_init(||
Arc::default());
+ let cached_bloom_filter =
CACHED_BLOOM_FILTER.get_or_init(Arc::default);
let mut cached_bloom_filter = cached_bloom_filter.lock();
cached_bloom_filter
.entry(uuid.to_string())
@@ -217,7 +217,7 @@ fn get_cached_bloom_filter(
}
fn clear_cached_bloom_filter() {
-    let cached_bloom_filter = CACHED_BLOOM_FILTER.get_or_init(|| Arc::default());
+    let cached_bloom_filter = CACHED_BLOOM_FILTER.get_or_init(Arc::default);
let mut cached_bloom_filter = cached_bloom_filter.lock();
cached_bloom_filter.retain(|_, v| Arc::strong_count(v) > 0);
}
diff --git a/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
b/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
index 607cc0b5..63448f9d 100644
--- a/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
+++ b/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
@@ -217,7 +217,7 @@ mod test {
let output_batch =
RecordBatch::try_from_iter_with_nullable(vec![("cccccc1",
output_array, true)])?;
- let expected = vec![
+ let expected = [
"+---------+",
"| cccccc1 |",
"+---------+",
@@ -235,7 +235,7 @@ mod test {
let output_array = get_indexed.evaluate(&input_batch)?.into_array(0)?;
let output_batch =
RecordBatch::try_from_iter_with_nullable(vec![("cccccc1",
output_array, true)])?;
- let expected = vec![
+ let expected = [
"+---------+",
"| cccccc1 |",
"+---------+",
diff --git a/native-engine/datafusion-ext-exprs/src/get_map_value.rs
b/native-engine/datafusion-ext-exprs/src/get_map_value.rs
index e2a968e0..61729270 100644
--- a/native-engine/datafusion-ext-exprs/src/get_map_value.rs
+++ b/native-engine/datafusion-ext-exprs/src/get_map_value.rs
@@ -178,18 +178,18 @@ mod test {
let key_data = ArrayData::builder(DataType::Int32)
.len(8)
.add_buffer(Buffer::from_slice_ref(
- &[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice(),
+ [0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice(),
))
.build()?;
let value_data = ArrayData::builder(DataType::UInt32)
.len(8)
.add_buffer(Buffer::from_slice_ref(
- &[0u32, 10, 20, 0, 40, 0, 60, 70].to_byte_slice(),
+ [0u32, 10, 20, 0, 40, 0, 60, 70].to_byte_slice(),
))
- .null_bit_buffer(Some(Buffer::from_slice_ref(&[0b11010110])))
+ .null_bit_buffer(Some(Buffer::from_slice_ref([0b11010110])))
.build()?;
- let entry_offsets = Buffer::from_slice_ref(&[0, 3, 6,
8].to_byte_slice());
+ let entry_offsets = Buffer::from_slice_ref([0, 3, 6,
8].to_byte_slice());
let keys_field = Arc::new(Field::new("keys", DataType::Int32, false));
let values_field = Arc::new(Field::new("values", DataType::UInt32,
true));
@@ -223,7 +223,7 @@ mod test {
let output_array = get_indexed.evaluate(&input_batch)?.into_array(0)?;
let output_batch =
RecordBatch::try_from_iter_with_nullable(vec![("test col",
output_array, true)])?;
- let expected = vec![
+ let expected = [
"+----------+",
"| test col |",
"+----------+",
@@ -239,7 +239,7 @@ mod test {
let output_array = get_indexed.evaluate(&input_batch)?.into_array(0)?;
let output_batch =
RecordBatch::try_from_iter_with_nullable(vec![("test col",
output_array, true)])?;
- let expected = vec![
+ let expected = [
"+----------+",
"| test col |",
"+----------+",
@@ -274,7 +274,7 @@ mod test {
let output_batch =
RecordBatch::try_from_iter_with_nullable(vec![("test col",
output_array, true)])?;
- let expected = vec![
+ let expected = [
"+----------+",
"| test col |",
"+----------+",
@@ -290,7 +290,7 @@ mod test {
let output_array = get_indexed.evaluate(&input_batch)?.into_array(0)?;
let output_batch =
RecordBatch::try_from_iter_with_nullable(vec![("test col",
output_array, true)])?;
- let expected = vec![
+ let expected = [
"+----------+",
"| test col |",
"+----------+",
diff --git a/native-engine/datafusion-ext-exprs/src/named_struct.rs
b/native-engine/datafusion-ext-exprs/src/named_struct.rs
index fa5ea8b1..7caef85e 100644
--- a/native-engine/datafusion-ext-exprs/src/named_struct.rs
+++ b/native-engine/datafusion-ext-exprs/src/named_struct.rs
@@ -163,7 +163,7 @@ mod test {
let output_batch =
RecordBatch::try_from_iter_with_nullable(vec![("cccccc1",
output_array, true)])?;
- let expected = vec![
+ let expected = [
"+--------------------------------------------------------+",
"| cccccc1 |",
"+--------------------------------------------------------+",
diff --git a/native-engine/datafusion-ext-exprs/src/row_num.rs
b/native-engine/datafusion-ext-exprs/src/row_num.rs
index 29e6a353..550592b5 100644
--- a/native-engine/datafusion-ext-exprs/src/row_num.rs
+++ b/native-engine/datafusion-ext-exprs/src/row_num.rs
@@ -80,7 +80,7 @@ impl PhysicalExpr for RowNumExpr {
fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
let num_rows = batch.num_rows();
let cur = self.cur.fetch_add(num_rows as i64, SeqCst);
-        let array: Int64Array = (cur..cur + num_rows as i64).into_iter().collect();
+        let array: Int64Array = (cur..cur + num_rows as i64).collect();
Ok(ColumnarValue::Array(Arc::new(array)))
}
diff --git
a/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
b/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
index 31aed4c9..c0ae33ee 100644
--- a/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
+++ b/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
@@ -60,7 +60,7 @@ impl SparkScalarSubqueryWrapperExpr {
impl Display for SparkScalarSubqueryWrapperExpr {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
- write!(f, "{:?}", self)
+ write!(f, "{self:?}")
}
}
@@ -113,7 +113,7 @@ impl PhysicalExpr for SparkScalarSubqueryWrapperExpr {
self.return_type.clone(),
self.return_nullable,
vec![],
- format!("Subquery"),
+ "Subquery".to_string(),
)?;
let stub_batch = RecordBatch::try_new_with_options(
Arc::new(Schema::empty()),
diff --git a/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
b/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
index 075865cc..c598ec29 100644
--- a/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
+++ b/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
@@ -193,7 +193,7 @@ impl PhysicalExpr for SparkUDFWrapperExpr {
Ok(Arc::new(Self::try_new(
self.serialized.clone(),
self.return_type.clone(),
- self.return_nullable.clone(),
+ self.return_nullable,
children,
self.expr_string.clone(),
)?))
diff --git a/native-engine/datafusion-ext-functions/Cargo.toml
b/native-engine/datafusion-ext-functions/Cargo.toml
index d3c5128b..4ac5e440 100644
--- a/native-engine/datafusion-ext-functions/Cargo.toml
+++ b/native-engine/datafusion-ext-functions/Cargo.toml
@@ -21,6 +21,9 @@ version = "0.1.0"
edition = "2024"
resolver = "1"
+[lints]
+workspace = true
+
[dependencies]
arrow = { workspace = true }
auron-jni-bridge = { workspace = true }
diff --git
a/native-engine/datafusion-ext-functions/src/brickhouse/array_union.rs
b/native-engine/datafusion-ext-functions/src/brickhouse/array_union.rs
index 39064099..833cc1fb 100644
--- a/native-engine/datafusion-ext-functions/src/brickhouse/array_union.rs
+++ b/native-engine/datafusion-ext-functions/src/brickhouse/array_union.rs
@@ -81,7 +81,6 @@ pub fn array_union(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
offset_buffer.push(offset);
let scalars = (0..num_rows)
- .into_iter()
.map(|row_idx| {
let mut set = HashSet::new();
let mut valid = true;
diff --git a/native-engine/datafusion-ext-functions/src/lib.rs
b/native-engine/datafusion-ext-functions/src/lib.rs
index db297f29..a65dc0d4 100644
--- a/native-engine/datafusion-ext-functions/src/lib.rs
+++ b/native-engine/datafusion-ext-functions/src/lib.rs
@@ -35,6 +35,7 @@ mod spark_round;
mod spark_strings;
mod spark_unscaled_value;
+#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
pub fn create_auron_ext_function(
name: &str,
spark_partition_id: usize,
diff --git a/native-engine/datafusion-ext-functions/src/spark_bround.rs
b/native-engine/datafusion-ext-functions/src/spark_bround.rs
index 4cb0bd99..9cc476b0 100644
--- a/native-engine/datafusion-ext-functions/src/spark_bround.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_bround.rs
@@ -209,8 +209,10 @@ fn round_half_even_f32(x: f32) -> f32 {
f + 1.0
} else if diff < 0.5 {
f
+ } else if ((f as i32) & 1) == 0 {
+ f
} else {
- if ((f as i32) & 1) == 0 { f } else { f + 1.0 }
+ f + 1.0
};
rounded.copysign(sign)
diff --git a/native-engine/datafusion-ext-functions/src/spark_check_overflow.rs
b/native-engine/datafusion-ext-functions/src/spark_check_overflow.rs
index 5a67a844..98bd7471 100644
--- a/native-engine/datafusion-ext-functions/src/spark_check_overflow.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_check_overflow.rs
@@ -33,8 +33,7 @@ pub fn spark_check_overflow(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
};
assert!(
to_precision >= 1,
- "check_overflow: illegal precision: {}",
- to_precision
+ "check_overflow: illegal precision: {to_precision}"
);
Ok(match &args[0] {
diff --git a/native-engine/datafusion-ext-functions/src/spark_crypto.rs
b/native-engine/datafusion-ext-functions/src/spark_crypto.rs
index de406f30..f774ab99 100644
--- a/native-engine/datafusion-ext-functions/src/spark_crypto.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_crypto.rs
@@ -116,6 +116,7 @@ mod tests {
/// Helper function to run a test for a given hash function and scalar
/// input.
+ #[allow(clippy::panic)]
fn run_scalar_test(
// Accepts any function that matches the signature of the spark_sha*
functions.
hash_fn: impl Fn(&[ColumnarValue]) -> DataFusionResult<ColumnarValue>,
diff --git a/native-engine/datafusion-ext-functions/src/spark_dates.rs
b/native-engine/datafusion-ext-functions/src/spark_dates.rs
index 3ea5e23f..800b0e53 100644
--- a/native-engine/datafusion-ext-functions/src/spark_dates.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_dates.rs
@@ -67,7 +67,7 @@ pub fn spark_quarter(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
let quarter = Int32Array::from_iter(
month_arr
.iter()
- .map(|opt_m| opt_m.map(|m| ((m - 1) / 3 + 1) as i32)),
+ .map(|opt_m| opt_m.map(|m| ((m - 1) / 3 + 1))),
);
Ok(ColumnarValue::Array(Arc::new(quarter)))
diff --git
a/native-engine/datafusion-ext-functions/src/spark_get_json_object.rs
b/native-engine/datafusion-ext-functions/src/spark_get_json_object.rs
index 9d5c4078..3af47fd3 100644
--- a/native-engine/datafusion-ext-functions/src/spark_get_json_object.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_get_json_object.rs
@@ -151,7 +151,7 @@ pub fn spark_get_parsed_json_object(args: &[ColumnarValue])
-> Result<ColumnarVa
}
};
- let fallback_results = parse_fallback(&path_string, json_array)?;
+ let fallback_results = parse_fallback(path_string, json_array)?;
let mut fallback_results_iter = fallback_results.iter();
let output = StringArray::from_iter(json_array.iter().map(|value| {
@@ -187,7 +187,7 @@ pub fn spark_get_parsed_json_simple_field(
field: &String,
) -> Result<ArrayRef> {
let json_array = downcast_any!(parsed_json_array, UserDefinedArray)?;
- let fallback_results = parse_fallback(&field, json_array)?;
+ let fallback_results = parse_fallback(field, json_array)?;
let mut fallback_results_iter = fallback_results.iter();
let output = StringArray::from_iter(json_array.iter().map(|value| {
@@ -482,7 +482,7 @@ impl HiveGetJsonObjectMatcher {
};
} else if let serde_json::Value::Array(array) = value {
let vs = array
- .into_iter()
+ .iter()
.map(|item| {
if let serde_json::Value::Object(object) = item {
match object.get(child) {
diff --git a/native-engine/datafusion-ext-functions/src/spark_hash.rs
b/native-engine/datafusion-ext-functions/src/spark_hash.rs
index 6b6d8132..f2004c70 100644
--- a/native-engine/datafusion-ext-functions/src/spark_hash.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_hash.rs
@@ -27,7 +27,7 @@ pub fn spark_murmur3_hash(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
spark_hash(args, |len, is_scalar, arrays| {
// use identical seed as spark hash partition
let spark_murmur3_default_seed = 42i32;
- let hash_buffer = create_murmur3_hashes(len, &arrays,
spark_murmur3_default_seed);
+ let hash_buffer = create_murmur3_hashes(len, arrays,
spark_murmur3_default_seed);
if is_scalar {
ColumnarValue::Scalar(ScalarValue::from(hash_buffer[0]))
} else {
diff --git a/native-engine/datafusion-ext-functions/src/spark_initcap.rs
b/native-engine/datafusion-ext-functions/src/spark_initcap.rs
index 7c5218fc..beebd160 100644
--- a/native-engine/datafusion-ext-functions/src/spark_initcap.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_initcap.rs
@@ -27,7 +27,7 @@ pub fn string_initcap(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
ColumnarValue::Array(array) => {
let input_array = as_string_array(array)?;
let output_array =
-                StringArray::from_iter(input_array.into_iter().map(|s| s.map(|s| initcap(s))));
+                StringArray::from_iter(input_array.into_iter().map(|s| s.map(initcap)));
Ok(ColumnarValue::Array(Arc::new(output_array) as ArrayRef))
}
ColumnarValue::Scalar(ScalarValue::Utf8(Some(str))) => {
diff --git a/native-engine/datafusion-ext-functions/src/spark_make_decimal.rs
b/native-engine/datafusion-ext-functions/src/spark_make_decimal.rs
index ad22e6d0..228c2b44 100644
--- a/native-engine/datafusion-ext-functions/src/spark_make_decimal.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_make_decimal.rs
@@ -33,8 +33,7 @@ pub fn spark_make_decimal(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
};
assert!(
precision >= 1,
- "make_decimal: illegal precision: {}",
- precision
+ "make_decimal: illegal precision: {precision}"
);
Ok(match &args[0] {
diff --git a/native-engine/datafusion-ext-functions/src/spark_round.rs
b/native-engine/datafusion-ext-functions/src/spark_round.rs
index e8de0e57..3ceb5d13 100644
--- a/native-engine/datafusion-ext-functions/src/spark_round.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_round.rs
@@ -293,6 +293,7 @@ mod tests {
/// Unit test for `spark_round()` verifying Spark-style half-away-from-zero
/// rounding on scalar Float64.
+ #[allow(clippy::panic)]
#[test]
fn test_round_scalar() -> Result<()> {
let s = ColumnarValue::Scalar(ScalarValue::Float64(Some(-1.5)));
diff --git a/native-engine/datafusion-ext-functions/src/spark_strings.rs
b/native-engine/datafusion-ext-functions/src/spark_strings.rs
index 40b1c1c7..6b514b0d 100644
--- a/native-engine/datafusion-ext-functions/src/spark_strings.rs
+++ b/native-engine/datafusion-ext-functions/src/spark_strings.rs
@@ -228,7 +228,7 @@ pub fn string_concat_ws(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
return Ok(Arg::Ignore);
}
if let ScalarValue::Utf8(Some(s)) = scalar {
- return Ok(Arg::Literal(&s));
+ return Ok(Arg::Literal(s));
}
if let ScalarValue::List(l) = scalar
&& l.data_type() == &DataType::Utf8
diff --git a/native-engine/datafusion-ext-plans/Cargo.toml
b/native-engine/datafusion-ext-plans/Cargo.toml
index e6dff419..d3f86eb6 100644
--- a/native-engine/datafusion-ext-plans/Cargo.toml
+++ b/native-engine/datafusion-ext-plans/Cargo.toml
@@ -21,6 +21,9 @@ version = "0.1.0"
edition = "2024"
resolver = "1"
+[lints]
+workspace = true
+
[features]
default = ["tokio/rt-multi-thread"]
diff --git a/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
b/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
index 15dd82bf..41d23e39 100644
--- a/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
+++ b/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
@@ -356,6 +356,7 @@ impl AccUDAFBufferRowsColumn {
}
}
+#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
impl AccColumn for AccUDAFBufferRowsColumn {
fn as_any(&self) -> &dyn Any {
self
diff --git a/native-engine/datafusion-ext-plans/src/common/execution_context.rs
b/native-engine/datafusion-ext-plans/src/common/execution_context.rs
index 8f2dd14f..6bfa8279 100644
--- a/native-engine/datafusion-ext-plans/src/common/execution_context.rs
+++ b/native-engine/datafusion-ext-plans/src/common/execution_context.rs
@@ -76,6 +76,7 @@ pub struct ExecutionContext {
input_stat_metrics: Arc<OnceCell<Option<InputBatchStatistics>>>,
}
+#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
impl ExecutionContext {
pub fn new(
task_ctx: Arc<TaskContext>,
@@ -690,6 +691,7 @@ pub struct WrappedSender<T: RecordBatchWithPayload> {
exclude_time: OnceCell<Time>,
}
+#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
impl<T: RecordBatchWithPayload> WrappedSender<T> {
pub fn new(exec_ctx: Arc<ExecutionContext>, sender: Sender<Result<T>>) ->
Arc<Self> {
let wrapped = Arc::new(Self {
diff --git a/native-engine/datafusion-ext-plans/src/common/row_null_checker.rs
b/native-engine/datafusion-ext-plans/src/common/row_null_checker.rs
index 50a7cdfe..cbdb87b0 100644
--- a/native-engine/datafusion-ext-plans/src/common/row_null_checker.rs
+++ b/native-engine/datafusion-ext-plans/src/common/row_null_checker.rs
@@ -64,6 +64,7 @@ impl RowNullChecker {
}
/// Create a FieldConfig from DataType and SortOptions
+ #[allow(clippy::panic)] // Temporarily allow panic to refactor to Result
later
fn create_field_config_from_data_type(
data_type: &DataType,
sort_options: SortOptions,
diff --git a/native-engine/datafusion-ext-plans/src/shuffle/buffered_data.rs
b/native-engine/datafusion-ext-plans/src/shuffle/buffered_data.rs
index f29f6d21..40524296 100644
--- a/native-engine/datafusion-ext-plans/src/shuffle/buffered_data.rs
+++ b/native-engine/datafusion-ext-plans/src/shuffle/buffered_data.rs
@@ -246,6 +246,7 @@ impl<'a> PartitionedBatchesIterator<'a> {
}
/// all iterators returned should have been fully consumed
+ #[allow(clippy::panic)] // Temporarily allow panic to refactor to Result
later
pub fn next_partition_chunk(
&mut self,
) -> Option<(usize, impl Iterator<Item = RecordBatch> + 'a)> {