This is an automated email from the ASF dual-hosted git repository.
csy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/auron.git
The following commit(s) were added to refs/heads/master by this push:
new c0f13a08 [AURON #1877] Fix spelling errors via the typos tool (#1878)
c0f13a08 is described below
commit c0f13a08d4ab99439f613178adf32fbfc9fb5866
Author: Thomas <[email protected]>
AuthorDate: Sun Jan 11 22:26:43 2026 +0800
[AURON #1877] Fix spelling errors via the typos tool (#1878)
# Which issue does this PR close?
Closes #1877
# Rationale for this change
# What changes are included in this PR?
# Are there any user-facing changes?
# How was this patch tested?
---
native-engine/auron-jni-bridge/src/jni_bridge.rs | 2 +-
native-engine/auron-serde/src/from_proto.rs | 4 ++--
native-engine/datafusion-ext-commons/src/arrow/selection.rs | 2 +-
native-engine/datafusion-ext-commons/src/io/ipc_compression.rs | 2 +-
native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs | 2 +-
native-engine/datafusion-ext-exprs/src/string_ends_with.rs | 4 ++--
.../datafusion-ext-plans/src/common/cached_exprs_evaluator.rs | 2 +-
native-engine/datafusion-ext-plans/src/sort_exec.rs | 2 +-
.../org/apache/spark/sql/execution/auron/plan/NativeWindowBase.scala | 2 +-
9 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/native-engine/auron-jni-bridge/src/jni_bridge.rs
b/native-engine/auron-jni-bridge/src/jni_bridge.rs
index 90614cc2..6ea65ca8 100644
--- a/native-engine/auron-jni-bridge/src/jni_bridge.rs
+++ b/native-engine/auron-jni-bridge/src/jni_bridge.rs
@@ -540,7 +540,7 @@ impl JavaClasses<'static> {
pub fn get() -> &'static JavaClasses<'static> {
unsafe {
- // safety: JNI_JAVA_CLASSES must be initialized frist
+ // safety: JNI_JAVA_CLASSES must be initialized first
JNI_JAVA_CLASSES.get_unchecked()
}
}
diff --git a/native-engine/auron-serde/src/from_proto.rs
b/native-engine/auron-serde/src/from_proto.rs
index 0caaad6c..09c700db 100644
--- a/native-engine/auron-serde/src/from_proto.rs
+++ b/native-engine/auron-serde/src/from_proto.rs
@@ -1075,7 +1075,7 @@ fn try_parse_physical_sort_expr(
input: &Arc<dyn ExecutionPlan>,
sort: &Box<SortExecNode>,
) -> Result<Vec<PhysicalSortExpr>, PlanSerDeError> {
- let pyhsical_sort_expr = sort
+ let physical_sort_expr = sort
.expr
.iter()
.map(|expr| {
@@ -1111,7 +1111,7 @@ fn try_parse_physical_sort_expr(
}
})
.collect::<Result<Vec<_>, _>>()?;
- Ok(pyhsical_sort_expr)
+ Ok(physical_sort_expr)
}
pub fn parse_protobuf_partitioning(
diff --git a/native-engine/datafusion-ext-commons/src/arrow/selection.rs
b/native-engine/datafusion-ext-commons/src/arrow/selection.rs
index bf67bc28..32699086 100644
--- a/native-engine/datafusion-ext-commons/src/arrow/selection.rs
+++ b/native-engine/datafusion-ext-commons/src/arrow/selection.rs
@@ -216,7 +216,7 @@ pub fn create_array_interleaver(
NonNull::new_unchecked(take_value_ptrs[take_value_ptr_idx]
as *mut u8);
take_value_ptr_idx += 1;
- // for continous elements, just extend the area to copy
+ // for continuous elements, just extend the area to copy
// otherwise, copy current area and move to the next area
if src_end_ptr != value_ptr {
prefetch_read_data!(value_ptr.as_ptr()); // prefetch next
while copying current
diff --git a/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
b/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
index d3ea0b29..817b2c8e 100644
--- a/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
+++ b/native-engine/datafusion-ext-commons/src/io/ipc_compression.rs
@@ -204,7 +204,7 @@ impl<W: Write> IoCompressionWriter<W> {
match self {
IoCompressionWriter::LZ4(w) => {
w.try_finish()
- .or_else(|_| df_execution_err!("ipc compresion error"))?;
+ .or_else(|_| df_execution_err!("ipc compression error"))?;
}
IoCompressionWriter::ZSTD(w) => {
w.do_finish()?;
diff --git a/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
b/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
index e3c43e6e..075865cc 100644
--- a/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
+++ b/native-engine/datafusion-ext-exprs/src/spark_udf_wrapper.rs
@@ -209,7 +209,7 @@ fn invoke_udf(
params_batch: RecordBatch,
result_schema: SchemaRef,
) -> Result<ArrayRef> {
- // evalute via context
+ // evaluate via context
let struct_array = StructArray::from(params_batch);
let mut export_ffi_array = FFI_ArrowArray::new(&struct_array.to_data());
let mut import_ffi_array = FFI_ArrowArray::empty();
diff --git a/native-engine/datafusion-ext-exprs/src/string_ends_with.rs
b/native-engine/datafusion-ext-exprs/src/string_ends_with.rs
index 1432a24c..1d1fd958 100644
--- a/native-engine/datafusion-ext-exprs/src/string_ends_with.rs
+++ b/native-engine/datafusion-ext-exprs/src/string_ends_with.rs
@@ -139,10 +139,10 @@ mod test {
Some("rr".to_string()),
Some("roser r".to_string()),
]));
- // create a shema with the field
+ // create a schema with the field
let schema = Arc::new(Schema::new(vec![Field::new("col2",
DataType::Utf8, true)]));
- // create a RecordBatch with the shema and StringArray
+ // create a RecordBatch with the schema and StringArray
let batch =
RecordBatch::try_new(schema, vec![string_array]).expect("Error
creating RecordBatch");
diff --git
a/native-engine/datafusion-ext-plans/src/common/cached_exprs_evaluator.rs
b/native-engine/datafusion-ext-plans/src/common/cached_exprs_evaluator.rs
index 1c7ec79f..fe77a438 100644
--- a/native-engine/datafusion-ext-plans/src/common/cached_exprs_evaluator.rs
+++ b/native-engine/datafusion-ext-plans/src/common/cached_exprs_evaluator.rs
@@ -402,7 +402,7 @@ impl PhysicalExpr for CachedExpr {
}
}
-/// A struct holding all evaluated values of cachable expressions
+/// A struct holding all evaluated values of cacheable expressions
#[derive(Clone)]
struct Cache {
values: Arc<Mutex<Vec<Option<ColumnarValue>>>>,
diff --git a/native-engine/datafusion-ext-plans/src/sort_exec.rs
b/native-engine/datafusion-ext-plans/src/sort_exec.rs
index 24d0beb6..8e20bc41 100644
--- a/native-engine/datafusion-ext-plans/src/sort_exec.rs
+++ b/native-engine/datafusion-ext-plans/src/sort_exec.rs
@@ -1266,7 +1266,7 @@ impl KeyCollector for InMemRowsKeyCollector {
assert_eq!(self.offsets.len() - 1, num_rows);
let mut rows = row_converter.empty_rows(0, 0);
unsafe {
- // safety: acces rows.buffer/offsets
+ // safety: access rows.buffer/offsets
struct XRows {
buffer: Vec<u8>,
offsets: Vec<usize>,
diff --git
a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowBase.scala
b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowBase.scala
index 2e849536..a7a0792b 100644
---
a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowBase.scala
+++
b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowBase.scala
@@ -212,7 +212,7 @@ abstract class NativeWindowBase(
.addAllPartitionSpec(nativePartitionSpecExprs.asJava)
.addAllOrderSpec(nativeOrderSpecExprs.asJava)
- // WindowGrupLimitExec does not output window cols
+ // WindowGroupLimitExec does not output window cols
groupLimit match {
case Some(limit) =>
nativeWindowExec.setGroupLimit(WindowGroupLimit.newBuilder().setK(limit))