This is an automated email from the ASF dual-hosted git repository.
richox pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/auron.git
The following commit(s) were added to refs/heads/master by this push:
new 6ac9e89c [AURON #1912] Clean up rust default lints (#2039)
6ac9e89c is described below
commit 6ac9e89c4c5fc9ad30fe2f93a55bac82d74bd032
Author: yew1eb <[email protected]>
AuthorDate: Tue Mar 17 14:52:52 2026 +0800
[AURON #1912] Clean up rust default lints (#2039)
<!--
- Start the PR title with the related issue ID, e.g. '[AURON #XXXX]
Short summary...'.
-->
# Which issue does this PR close?
Closes #1912
# Rationale for this change
The workspace `Cargo.toml` temporarily allowed several default rustc lints
(`unused_variables`, `dead_code`, `unused_imports`, `unused_must_use`,
`deprecated`). This change removes those blanket allows and resolves the
resulting warnings so the default lints are enforced again.
# What changes are included in this PR?
Removes the `[workspace.lints.rust]` allow block; drops unused imports;
explicitly discards `#[must_use]` results; replaces deprecated
`statistics()` calls with `partition_statistics(None)`; and adds targeted
`#[allow(...)]` attributes where a warning cannot be fixed yet.
# Are there any user-facing changes?
No.
# How was this patch tested?
---
Cargo.toml | 8 --------
native-engine/auron-planner/src/planner.rs | 4 ++--
.../datafusion-ext-commons/src/arrow/eq_comparator.rs | 11 ++++++-----
native-engine/datafusion-ext-exprs/src/get_indexed_field.rs | 2 +-
native-engine/datafusion-ext-exprs/src/lib.rs | 1 +
.../datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs | 1 -
native-engine/datafusion-ext-functions/src/lib.rs | 2 +-
.../datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs | 8 ++------
native-engine/datafusion-ext-plans/src/limit_exec.rs | 3 ++-
native-engine/datafusion-ext-plans/src/parquet_exec.rs | 2 ++
native-engine/datafusion-ext-plans/src/parquet_sink_exec.rs | 1 +
.../datafusion-ext-plans/src/rss_shuffle_writer_exec.rs | 2 +-
native-engine/datafusion-ext-plans/src/shuffle_writer_exec.rs | 2 +-
native-engine/datafusion-ext-plans/src/sort_exec.rs | 7 ++++---
14 files changed, 24 insertions(+), 30 deletions(-)
diff --git a/Cargo.toml b/Cargo.toml
index da46dc69..e08aca03 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -27,14 +27,6 @@ members = [
"native-engine/auron-memmgr",
]
-[workspace.lints.rust]
-# Pending processing (temporarily allow)
-unused_variables = "allow"
-dead_code = "allow"
-unused_imports = "allow"
-unused_must_use = "allow"
-deprecated = "allow"
-
[workspace.lints.clippy]
unwrap_used = "deny"
panic = "deny"
diff --git a/native-engine/auron-planner/src/planner.rs
b/native-engine/auron-planner/src/planner.rs
index a058c67b..d4f82191 100644
--- a/native-engine/auron-planner/src/planner.rs
+++ b/native-engine/auron-planner/src/planner.rs
@@ -27,7 +27,7 @@ use arrow::{
};
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD};
use datafusion::{
- common::{ExprSchema, Result, ScalarValue, stats::Precision},
+ common::{Result, ScalarValue, stats::Precision},
datasource::{
file_format::file_compression_type::FileCompressionType,
listing::{FileRange, PartitionedFile},
@@ -40,7 +40,7 @@ use datafusion::{
expressions::{LikeExpr, SCAndExpr, SCOrExpr, in_list},
},
physical_plan::{
-        ColumnStatistics, ExecutionPlan, PhysicalExpr, Statistics, expressions as phys_expr,
+        ColumnStatistics, ExecutionPlan, Statistics, expressions as phys_expr,
expressions::{
BinaryExpr, CaseExpr, CastExpr, Column, IsNotNullExpr, IsNullExpr,
Literal,
NegativeExpr, NotExpr, PhysicalSortExpr,
diff --git a/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
b/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
index 1df97694..9bb04bf6 100644
--- a/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
+++ b/native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs
@@ -743,11 +743,12 @@ pub mod tests {
}
#[test]
- fn test_bytes() {
- test_bytes_impl::<Utf8Type>();
- test_bytes_impl::<LargeUtf8Type>();
- test_bytes_impl::<BinaryType>();
- test_bytes_impl::<LargeBinaryType>();
+ fn test_bytes() -> Result<()> {
+ let _ = test_bytes_impl::<Utf8Type>();
+ let _ = test_bytes_impl::<LargeUtf8Type>();
+ let _ = test_bytes_impl::<BinaryType>();
+ let _ = test_bytes_impl::<LargeBinaryType>();
+ Ok(())
}
#[test]
diff --git a/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
b/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
index 63448f9d..33ba456b 100644
--- a/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
+++ b/native-engine/datafusion-ext-exprs/src/get_indexed_field.rs
@@ -17,7 +17,7 @@ use std::{
any::Any,
convert::TryInto,
fmt::{Debug, Formatter},
- hash::{Hash, Hasher},
+ hash::Hash,
sync::Arc,
};
diff --git a/native-engine/datafusion-ext-exprs/src/lib.rs
b/native-engine/datafusion-ext-exprs/src/lib.rs
index c6732b57..6400f7d2 100644
--- a/native-engine/datafusion-ext-exprs/src/lib.rs
+++ b/native-engine/datafusion-ext-exprs/src/lib.rs
@@ -31,6 +31,7 @@ pub mod string_contains;
pub mod string_ends_with;
pub mod string_starts_with;
+#[allow(dead_code)]
fn down_cast_any_ref(any: &dyn Any) -> &dyn Any {
if any.is::<PhysicalExprRef>() {
any.downcast_ref::<PhysicalExprRef>()
diff --git
a/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
b/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
index c0ae33ee..44eaf9e3 100644
--- a/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
+++ b/native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs
@@ -17,7 +17,6 @@ use std::{
any::Any,
fmt::{Debug, Display, Formatter},
hash::{Hash, Hasher},
- io::Write,
sync::Arc,
};
diff --git a/native-engine/datafusion-ext-functions/src/lib.rs
b/native-engine/datafusion-ext-functions/src/lib.rs
index a65dc0d4..9464722e 100644
--- a/native-engine/datafusion-ext-functions/src/lib.rs
+++ b/native-engine/datafusion-ext-functions/src/lib.rs
@@ -38,7 +38,7 @@ mod spark_unscaled_value;
#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
pub fn create_auron_ext_function(
name: &str,
- spark_partition_id: usize,
+ #[allow(unused_variables)] spark_partition_id: usize,
) -> Result<ScalarFunctionImplementation> {
// auron ext functions, if used for spark should be start with 'Spark_',
// if used for flink should be start with 'Flink_',
diff --git a/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
b/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
index 8ca351fd..ffb2de1c 100644
--- a/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
+++ b/native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs
@@ -16,15 +16,11 @@
use std::{
any::Any,
fmt::{Debug, Display, Formatter},
- io::{Cursor, Read, Write},
sync::Arc,
};
use arrow::{
-    array::{
-        Array, ArrayAccessor, ArrayRef, BinaryArray, BinaryBuilder, StructArray, as_struct_array,
-        make_array,
-    },
+    array::{Array, ArrayRef, StructArray, as_struct_array, make_array},
datatypes::{DataType, Field, Schema, SchemaRef},
ffi::{FFI_ArrowArray, FFI_ArrowSchema, from_ffi, from_ffi_and_data_type},
record_batch::{RecordBatch, RecordBatchOptions},
@@ -39,7 +35,7 @@ use datafusion::{
physical_expr::PhysicalExprRef,
};
use datafusion_ext_commons::{
- UninitializedInit, downcast_any,
+ downcast_any,
io::{read_len, write_len},
};
use jni::objects::{GlobalRef, JObject};
diff --git a/native-engine/datafusion-ext-plans/src/limit_exec.rs
b/native-engine/datafusion-ext-plans/src/limit_exec.rs
index dd45ed12..4cdeb029 100644
--- a/native-engine/datafusion-ext-plans/src/limit_exec.rs
+++ b/native-engine/datafusion-ext-plans/src/limit_exec.rs
@@ -120,7 +120,7 @@ impl ExecutionPlan for LimitExec {
fn statistics(&self) -> Result<Statistics> {
Statistics::with_fetch(
- self.input.statistics()?,
+ self.input.partition_statistics(None)?,
self.schema(),
Some(self.limit),
self.offset,
@@ -194,6 +194,7 @@ fn execute_limit_with_offset(
#[cfg(test)]
mod test {
+ #![allow(deprecated)]
use std::sync::Arc;
use arrow::{
diff --git a/native-engine/datafusion-ext-plans/src/parquet_exec.rs
b/native-engine/datafusion-ext-plans/src/parquet_exec.rs
index da272c80..c4f1e717 100644
--- a/native-engine/datafusion-ext-plans/src/parquet_exec.rs
+++ b/native-engine/datafusion-ext-plans/src/parquet_exec.rs
@@ -17,6 +17,7 @@
//! Execution plan for reading Parquet files
+#![allow(deprecated)] // Deprecated method: fetch_parquet_metadata
use std::{any::Any, fmt, fmt::Formatter, ops::Range, pin::Pin, sync::Arc};
use arrow::datatypes::SchemaRef;
@@ -465,6 +466,7 @@ impl AsyncFileReader for ParquetFileReaderRef {
}
}
+#[allow(dead_code)]
fn expr_contains_decimal_type(expr: &PhysicalExprRef, schema: &SchemaRef) -> Result<bool> {
if matches!(expr.data_type(schema)?, DataType::Decimal128(..)) {
return Ok(true);
diff --git a/native-engine/datafusion-ext-plans/src/parquet_sink_exec.rs
b/native-engine/datafusion-ext-plans/src/parquet_sink_exec.rs
index 72be4e55..5dcc72e3 100644
--- a/native-engine/datafusion-ext-plans/src/parquet_sink_exec.rs
+++ b/native-engine/datafusion-ext-plans/src/parquet_sink_exec.rs
@@ -15,6 +15,7 @@
// specific language governing permissions and limitations
// under the License.
+#![allow(deprecated)] // Deprecated method: set_max_statistics_size
use std::{any::Any, fmt::Formatter, io::Write, sync::Arc};
use arrow::{
diff --git a/native-engine/datafusion-ext-plans/src/rss_shuffle_writer_exec.rs
b/native-engine/datafusion-ext-plans/src/rss_shuffle_writer_exec.rs
index bad86413..0397a33d 100644
--- a/native-engine/datafusion-ext-plans/src/rss_shuffle_writer_exec.rs
+++ b/native-engine/datafusion-ext-plans/src/rss_shuffle_writer_exec.rs
@@ -179,7 +179,7 @@ impl ExecutionPlan for RssShuffleWriterExec {
}
fn statistics(&self) -> Result<Statistics> {
- self.input.statistics()
+ self.input.partition_statistics(None)
}
}
diff --git a/native-engine/datafusion-ext-plans/src/shuffle_writer_exec.rs
b/native-engine/datafusion-ext-plans/src/shuffle_writer_exec.rs
index 32c2dc14..272cd7db 100644
--- a/native-engine/datafusion-ext-plans/src/shuffle_writer_exec.rs
+++ b/native-engine/datafusion-ext-plans/src/shuffle_writer_exec.rs
@@ -174,7 +174,7 @@ impl ExecutionPlan for ShuffleWriterExec {
}
fn statistics(&self) -> Result<Statistics> {
- self.input.statistics()
+ self.input.partition_statistics(None)
}
}
diff --git a/native-engine/datafusion-ext-plans/src/sort_exec.rs
b/native-engine/datafusion-ext-plans/src/sort_exec.rs
index cc66f862..8b56b6eb 100644
--- a/native-engine/datafusion-ext-plans/src/sort_exec.rs
+++ b/native-engine/datafusion-ext-plans/src/sort_exec.rs
@@ -209,7 +209,7 @@ impl ExecutionPlan for SortExec {
fn statistics(&self) -> Result<Statistics> {
Statistics::with_fetch(
- self.input.statistics()?,
+ self.input.partition_statistics(None)?,
self.schema(),
self.limit,
self.offset,
@@ -718,7 +718,7 @@ impl ExternalSorter {
if !in_mem_blocks.is_empty() {
let mut merger = Merger::try_new(self.clone(), in_mem_blocks)?;
if self.skip > 0 {
-            merger.skip_rows::<InMemRowsKeyCollector>(self.skip, output_batch_size);
+            let _ = merger.skip_rows::<InMemRowsKeyCollector>(self.skip, output_batch_size);
}
while let Some((key_collector, pruned_batch)) =
merger.next::<InMemRowsKeyCollector>(output_batch_size)?
@@ -744,7 +744,7 @@ impl ExternalSorter {
        let spill_blocks = spills.into_iter().map(|spill| spill.block).collect();
let mut merger = Merger::try_new(self.to_arc(), spill_blocks)?;
if self.skip > 0 {
-            merger.skip_rows::<InMemRowsKeyCollector>(self.skip, output_batch_size);
+            let _ = merger.skip_rows::<InMemRowsKeyCollector>(self.skip, output_batch_size);
}
while let Some((key_collector, pruned_batch)) =
merger.next::<InMemRowsKeyCollector>(output_batch_size)?
@@ -1580,6 +1580,7 @@ mod test {
#[cfg(test)]
mod fuzztest {
+ #![allow(deprecated)]
use std::{sync::Arc, time::Instant};
use arrow::{