This is an automated email from the ASF dual-hosted git repository.
alamb pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git
The following commit(s) were added to refs/heads/main by this push:
new 5505113d97 Fix clippy for Rust 1.88 release (#7797)
5505113d97 is described below
commit 5505113d9745aba2cb46df2fd11a4b3d9672d5d2
Author: Andrew Lamb <[email protected]>
AuthorDate: Fri Jun 27 06:30:53 2025 -0400
Fix clippy for Rust 1.88 release (#7797)
# Which issue does this PR close?
- Closes https://github.com/apache/arrow-rs/issues/7796
# Rationale for this change
CI started failing after the clippy shipped with Rust 1.88 became stricter
# What changes are included in this PR?
Apply clippy fixes
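Most of the diff inlines `format!` arguments, the pattern flagged by clippy's `uninlined_format_args` lint. A minimal sketch of the before/after shape (a standalone illustration, not code from this diff):
```rust
fn main() {
    let required_scale = 3;
    let product_scale = 2;

    // Before: positional format arguments, which the lint flags
    let old = format!(
        "Required scale {} is greater than product scale {}",
        required_scale, product_scale
    );

    // After: the variables are inlined directly into the format string
    let new = format!("Required scale {required_scale} is greater than product scale {product_scale}");

    assert_eq!(old, new);
}
```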
# Are these changes tested?
Yes, by CI
# Are there any user-facing changes?
No
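The one non-mechanical change is in `arrow-array/src/arithmetic.rs`, where the `f32`/`f64` bit-pattern constants keep their `transmute` form behind `#[allow(unnecessary_transmutes)]` because of the crate's MSRV (see the comments in the diff below). As a sketch, the transmute-free form the lint points toward uses the safe bit-casting constructors (a standalone illustration, not code from this diff):
```rust
fn main() {
    // Safe equivalents of `unsafe { std::mem::transmute::<i32, f32>(...) }`:
    // reinterpret the integer bit pattern as a float.
    let a = f32::from_bits((-1_i32) as u32);
    let b = f32::from_bits(i32::MAX as u32);

    // Round-tripping through to_bits recovers the original bit patterns.
    assert_eq!(a.to_bits(), (-1_i32) as u32);
    assert_eq!(b.to_bits(), i32::MAX as u32);
}
```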
---
arrow-arith/src/arithmetic.rs | 5 ++--
arrow-arith/src/numeric.rs | 18 +++++---------
arrow-arith/src/temporal.rs | 2 +-
arrow-array/src/arithmetic.rs | 28 ++++++++++++++++++----
arrow-array/src/array/fixed_size_binary_array.rs | 2 +-
arrow-array/src/array/fixed_size_list_array.rs | 2 +-
arrow-array/src/array/primitive_array.rs | 12 +++++-----
arrow-array/src/array/union_array.rs | 2 +-
.../builder/generic_bytes_dictionary_builder.rs | 2 +-
arrow-array/src/ffi.rs | 2 +-
arrow-array/src/types.rs | 28 +++++-----------------
arrow-avro/benches/avro_reader.rs | 16 ++++++-------
arrow-avro/examples/read_with_utf8view.rs | 6 ++---
arrow-buffer/src/util/bit_mask.rs | 6 ++---
arrow-cast/src/cast/decimal.rs | 3 +--
arrow-cast/src/cast/mod.rs | 12 ++++------
arrow-cast/src/display.rs | 8 +++----
arrow-cast/src/parse.rs | 3 +--
arrow-data/src/transform/run.rs | 10 ++------
arrow-flight/examples/flight_sql_server.rs | 4 ++--
arrow-flight/src/error.rs | 12 +++++-----
arrow-flight/src/sql/metadata/mod.rs | 3 +--
arrow-flight/src/sql/server.rs | 2 +-
arrow-integration-testing/src/lib.rs | 5 ++--
arrow-ipc/src/reader.rs | 3 +--
arrow-ipc/src/reader/stream.rs | 2 +-
arrow-ipc/src/writer.rs | 8 +++----
arrow-json/src/reader/mod.rs | 8 ++-----
arrow-json/src/reader/schema.rs | 3 +--
arrow-json/src/reader/string_view_array.rs | 8 +++----
arrow-json/src/reader/struct_array.rs | 3 +--
arrow-json/src/writer/encoder.rs | 3 +--
arrow-json/src/writer/mod.rs | 2 +-
arrow-pyarrow/src/lib.rs | 6 ++---
arrow-pyarrow/tests/pyarrow.rs | 4 ++--
arrow-row/src/lib.rs | 5 ++--
arrow-row/src/list.rs | 3 +--
arrow-select/src/coalesce.rs | 2 +-
parquet-variant/src/to_json.rs | 1 -
parquet/benches/arrow_reader_clickbench.rs | 7 +++---
parquet/benches/arrow_reader_row_filter.rs | 2 +-
parquet/benches/encoding.rs | 4 ++--
parquet/examples/external_metadata.rs | 2 +-
parquet/src/arrow/async_writer/mod.rs | 2 +-
parquet/src/arrow/buffer/view_buffer.rs | 2 +-
parquet/src/basic.rs | 9 ++++---
parquet/src/bin/parquet-show-bloom-filter.rs | 4 ++--
parquet/src/encryption/ciphers.rs | 2 +-
parquet/src/file/properties.rs | 4 ++--
parquet/src/file/statistics.rs | 3 +--
parquet/src/schema/printer.rs | 2 +-
parquet/src/thrift.rs | 4 ++--
parquet/tests/arrow_reader/bad_data.rs | 2 +-
parquet/tests/arrow_reader/checksum.rs | 2 +-
parquet/tests/encryption/encryption_util.rs | 7 +++---
55 files changed, 140 insertions(+), 172 deletions(-)
diff --git a/arrow-arith/src/arithmetic.rs b/arrow-arith/src/arithmetic.rs
index febf5ceabd..768fd798c0 100644
--- a/arrow-arith/src/arithmetic.rs
+++ b/arrow-arith/src/arithmetic.rs
@@ -43,8 +43,7 @@ fn get_fixed_point_info(
if required_scale > product_scale {
return Err(ArrowError::ComputeError(format!(
- "Required scale {} is greater than product scale {}",
- required_scale, product_scale
+ "Required scale {required_scale} is greater than product scale
{product_scale}",
)));
}
@@ -122,7 +121,7 @@ pub fn multiply_fixed_point_checked(
let mut mul = a.wrapping_mul(b);
mul = divide_and_round::<Decimal256Type>(mul, divisor);
mul.to_i128().ok_or_else(|| {
- ArrowError::ArithmeticOverflow(format!("Overflow happened on: {:?} * {:?}", a, b))
+ ArrowError::ArithmeticOverflow(format!("Overflow happened on: {a:?} * {b:?}"))
})
})
.and_then(|a| a.with_precision_and_scale(precision, required_scale))
diff --git a/arrow-arith/src/numeric.rs b/arrow-arith/src/numeric.rs
index 2cf8fa43a9..0bcf300032 100644
--- a/arrow-arith/src/numeric.rs
+++ b/arrow-arith/src/numeric.rs
@@ -574,10 +574,7 @@ impl DateOp for Date32Type {
impl DateOp for Date64Type {
fn add_year_month(left: Self::Native, right: i32) -> Result<Self::Native, ArrowError> {
Self::add_year_months_opt(left, right).ok_or_else(|| {
- ArrowError::ComputeError(format!(
- "Date arithmetic overflow: {} + {} months",
- left, right
- ))
+ ArrowError::ComputeError(format!("Date arithmetic overflow: {left} + {right} months",))
})
}
@@ -586,7 +583,7 @@ impl DateOp for Date64Type {
right: IntervalDayTime,
) -> Result<Self::Native, ArrowError> {
Self::add_day_time_opt(left, right).ok_or_else(|| {
- ArrowError::ComputeError(format!("Date arithmetic overflow: {} + {:?}", left, right))
+ ArrowError::ComputeError(format!("Date arithmetic overflow: {left} + {right:?}"))
})
}
@@ -595,16 +592,13 @@ impl DateOp for Date64Type {
right: IntervalMonthDayNano,
) -> Result<Self::Native, ArrowError> {
Self::add_month_day_nano_opt(left, right).ok_or_else(|| {
- ArrowError::ComputeError(format!("Date arithmetic overflow: {} + {:?}", left, right))
+ ArrowError::ComputeError(format!("Date arithmetic overflow: {left} + {right:?}"))
})
}
fn sub_year_month(left: Self::Native, right: i32) -> Result<Self::Native, ArrowError> {
Self::subtract_year_months_opt(left, right).ok_or_else(|| {
- ArrowError::ComputeError(format!(
- "Date arithmetic overflow: {} - {} months",
- left, right
- ))
+ ArrowError::ComputeError(format!("Date arithmetic overflow: {left} - {right} months",))
})
}
@@ -613,7 +607,7 @@ impl DateOp for Date64Type {
right: IntervalDayTime,
) -> Result<Self::Native, ArrowError> {
Self::subtract_day_time_opt(left, right).ok_or_else(|| {
- ArrowError::ComputeError(format!("Date arithmetic overflow: {} - {:?}", left, right))
+ ArrowError::ComputeError(format!("Date arithmetic overflow: {left} - {right:?}"))
})
}
@@ -622,7 +616,7 @@ impl DateOp for Date64Type {
right: IntervalMonthDayNano,
) -> Result<Self::Native, ArrowError> {
Self::subtract_month_day_nano_opt(left, right).ok_or_else(|| {
- ArrowError::ComputeError(format!("Date arithmetic overflow: {} - {:?}", left, right))
+ ArrowError::ComputeError(format!("Date arithmetic overflow: {left} - {right:?}"))
})
}
}
diff --git a/arrow-arith/src/temporal.rs b/arrow-arith/src/temporal.rs
index 0b2b98b67b..c62eec281d 100644
--- a/arrow-arith/src/temporal.rs
+++ b/arrow-arith/src/temporal.rs
@@ -79,7 +79,7 @@ pub enum DatePart {
impl std::fmt::Display for DatePart {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(f, "{:?}", self)
+ write!(f, "{self:?}")
}
}
diff --git a/arrow-array/src/arithmetic.rs b/arrow-array/src/arithmetic.rs
index b5f4a106f5..38717807b7 100644
--- a/arrow-array/src/arithmetic.rs
+++ b/arrow-array/src/arithmetic.rs
@@ -418,15 +418,35 @@ native_type_float_op!(
f32,
0.,
1.,
- unsafe { std::mem::transmute(-1_i32) },
- unsafe { std::mem::transmute(i32::MAX) }
+ unsafe {
+ // Need to allow in clippy because
+ // current MSRV (Minimum Supported Rust Version) is `1.81.0` but this item is stable since `1.87.0`
+ #[allow(unnecessary_transmutes)]
+ std::mem::transmute(-1_i32)
+ },
+ unsafe {
+ // Need to allow in clippy because
+ // current MSRV (Minimum Supported Rust Version) is `1.81.0` but this item is stable since `1.87.0`
+ #[allow(unnecessary_transmutes)]
+ std::mem::transmute(i32::MAX)
+ }
);
native_type_float_op!(
f64,
0.,
1.,
- unsafe { std::mem::transmute(-1_i64) },
- unsafe { std::mem::transmute(i64::MAX) }
+ unsafe {
+ // Need to allow in clippy because
+ // current MSRV (Minimum Supported Rust Version) is `1.81.0` but this item is stable since `1.87.0`
+ #[allow(unnecessary_transmutes)]
+ std::mem::transmute(-1_i64)
+ },
+ unsafe {
+ // Need to allow in clippy because
+ // current MSRV (Minimum Supported Rust Version) is `1.81.0` but this item is stable since `1.87.0`
+ #[allow(unnecessary_transmutes)]
+ std::mem::transmute(i64::MAX)
+ }
);
#[cfg(test)]
diff --git a/arrow-array/src/array/fixed_size_binary_array.rs b/arrow-array/src/array/fixed_size_binary_array.rs
index 576b801249..55973a58f2 100644
--- a/arrow-array/src/array/fixed_size_binary_array.rs
+++ b/arrow-array/src/array/fixed_size_binary_array.rs
@@ -87,7 +87,7 @@ impl FixedSizeBinaryArray {
) -> Result<Self, ArrowError> {
let data_type = DataType::FixedSizeBinary(size);
let s = size.to_usize().ok_or_else(|| {
- ArrowError::InvalidArgumentError(format!("Size cannot be negative, got {}", size))
+ ArrowError::InvalidArgumentError(format!("Size cannot be negative, got {size}"))
})?;
let len = values.len() / s;
diff --git a/arrow-array/src/array/fixed_size_list_array.rs b/arrow-array/src/array/fixed_size_list_array.rs
index af814cc614..f807cc88fb 100644
--- a/arrow-array/src/array/fixed_size_list_array.rs
+++ b/arrow-array/src/array/fixed_size_list_array.rs
@@ -149,7 +149,7 @@ impl FixedSizeListArray {
nulls: Option<NullBuffer>,
) -> Result<Self, ArrowError> {
let s = size.to_usize().ok_or_else(|| {
- ArrowError::InvalidArgumentError(format!("Size cannot be negative, got {}", size))
+ ArrowError::InvalidArgumentError(format!("Size cannot be negative, got {size}"))
})?;
let len = match s {
diff --git a/arrow-array/src/array/primitive_array.rs b/arrow-array/src/array/primitive_array.rs
index 073ad97744..6fd319aa42 100644
--- a/arrow-array/src/array/primitive_array.rs
+++ b/arrow-array/src/array/primitive_array.rs
@@ -2014,7 +2014,7 @@ mod tests {
.with_timezone("Asia/Taipei".to_string());
assert_eq!(
"PrimitiveArray<Timestamp(Millisecond,
Some(\"Asia/Taipei\"))>\n[\n 2018-12-31T08:00:00+08:00,\n
2018-12-31T08:00:00+08:00,\n 1921-01-02T08:00:00+08:00,\n]",
- format!("{:?}", arr)
+ format!("{arr:?}")
);
}
@@ -2067,7 +2067,7 @@ mod tests {
.with_timezone("America/Denver".to_string());
assert_eq!(
"PrimitiveArray<Timestamp(Millisecond,
Some(\"America/Denver\"))>\n[\n 2022-03-13T01:59:59-07:00,\n
2022-03-13T03:00:00-06:00,\n 2022-11-06T00:59:59-06:00,\n
2022-11-06T01:00:00-06:00,\n]",
- format!("{:?}", arr)
+ format!("{arr:?}")
);
}
@@ -2641,7 +2641,7 @@ mod tests {
None,
]
.into();
- let debug_str = format!("{:?}", array);
+ let debug_str = format!("{array:?}");
assert_eq!("PrimitiveArray<Time32(Second)>\n[\n Cast error: Failed to
convert -1 to temporal for Time32(Second),\n 00:00:00,\n 23:59:59,\n Cast
error: Failed to convert 86400 to temporal for Time32(Second),\n Cast error:
Failed to convert 86401 to temporal for Time32(Second),\n null,\n]",
debug_str
);
@@ -2658,7 +2658,7 @@ mod tests {
None,
]
.into();
- let debug_str = format!("{:?}", array);
+ let debug_str = format!("{array:?}");
assert_eq!("PrimitiveArray<Time32(Millisecond)>\n[\n Cast error:
Failed to convert -1 to temporal for Time32(Millisecond),\n 00:00:00,\n
23:59:59,\n Cast error: Failed to convert 86400000 to temporal for
Time32(Millisecond),\n Cast error: Failed to convert 86401000 to temporal for
Time32(Millisecond),\n null,\n]",
debug_str
);
@@ -2675,7 +2675,7 @@ mod tests {
None,
]
.into();
- let debug_str = format!("{:?}", array);
+ let debug_str = format!("{array:?}");
assert_eq!(
"PrimitiveArray<Time64(Nanosecond)>\n[\n Cast error: Failed to
convert -1 to temporal for Time64(Nanosecond),\n 00:00:00,\n 23:59:59,\n
Cast error: Failed to convert 86400000000000 to temporal for
Time64(Nanosecond),\n Cast error: Failed to convert 86401000000000 to temporal
for Time64(Nanosecond),\n null,\n]",
debug_str
@@ -2693,7 +2693,7 @@ mod tests {
None,
]
.into();
- let debug_str = format!("{:?}", array);
+ let debug_str = format!("{array:?}");
assert_eq!("PrimitiveArray<Time64(Microsecond)>\n[\n Cast error:
Failed to convert -1 to temporal for Time64(Microsecond),\n 00:00:00,\n
23:59:59,\n Cast error: Failed to convert 86400000000 to temporal for
Time64(Microsecond),\n Cast error: Failed to convert 86401000000 to temporal
for Time64(Microsecond),\n null,\n]", debug_str);
}
diff --git a/arrow-array/src/array/union_array.rs b/arrow-array/src/array/union_array.rs
index 2afe9af473..061bd71a77 100644
--- a/arrow-array/src/array/union_array.rs
+++ b/arrow-array/src/array/union_array.rs
@@ -940,7 +940,7 @@ impl std::fmt::Debug for UnionArray {
if let Some(offsets) = &self.offsets {
writeln!(f, "-- offsets buffer:")?;
- writeln!(f, "{:?}", offsets)?;
+ writeln!(f, "{offsets:?}")?;
}
let fields = match self.data_type() {
diff --git a/arrow-array/src/builder/generic_bytes_dictionary_builder.rs b/arrow-array/src/builder/generic_bytes_dictionary_builder.rs
index 3713a41123..a2ed91ac90 100644
--- a/arrow-array/src/builder/generic_bytes_dictionary_builder.rs
+++ b/arrow-array/src/builder/generic_bytes_dictionary_builder.rs
@@ -757,7 +757,7 @@ mod tests {
fn test_try_new_from_builder_cast_fails() {
let mut source_builder = StringDictionaryBuilder::<UInt16Type>::new();
for i in 0..257 {
- source_builder.append_value(format!("val{}", i));
+ source_builder.append_value(format!("val{i}"));
}
// there should be too many values that we can't downcast to the underlying type
diff --git a/arrow-array/src/ffi.rs b/arrow-array/src/ffi.rs
index ac28289e65..f3c34f6ccd 100644
--- a/arrow-array/src/ffi.rs
+++ b/arrow-array/src/ffi.rs
@@ -1576,7 +1576,7 @@ mod tests_from_ffi {
let mut strings = vec![];
for i in 0..1000 {
- strings.push(format!("string: {}", i));
+ strings.push(format!("string: {i}"));
}
let string_array = StringArray::from(strings);
diff --git a/arrow-array/src/types.rs b/arrow-array/src/types.rs
index d7d60cfdc9..e403d67785 100644
--- a/arrow-array/src/types.rs
+++ b/arrow-array/src/types.rs
@@ -1077,10 +1077,7 @@ impl Date64Type {
delta: <IntervalYearMonthType as ArrowPrimitiveType>::Native,
) -> <Date64Type as ArrowPrimitiveType>::Native {
Self::add_year_months_opt(date, delta).unwrap_or_else(|| {
- panic!(
- "Date64Type::add_year_months overflowed for date: {}, delta:
{}",
- date, delta
- )
+ panic!("Date64Type::add_year_months overflowed for date: {date},
delta: {delta}",)
})
}
@@ -1117,10 +1114,7 @@ impl Date64Type {
delta: <IntervalDayTimeType as ArrowPrimitiveType>::Native,
) -> <Date64Type as ArrowPrimitiveType>::Native {
Self::add_day_time_opt(date, delta).unwrap_or_else(|| {
- panic!(
- "Date64Type::add_day_time overflowed for date: {}, delta:
{:?}",
- date, delta
- )
+ panic!("Date64Type::add_day_time overflowed for date: {date},
delta: {delta:?}",)
})
}
@@ -1158,10 +1152,7 @@ impl Date64Type {
delta: <IntervalMonthDayNanoType as ArrowPrimitiveType>::Native,
) -> <Date64Type as ArrowPrimitiveType>::Native {
Self::add_month_day_nano_opt(date, delta).unwrap_or_else(|| {
- panic!(
- "Date64Type::add_month_day_nano overflowed for date: {},
delta: {:?}",
- date, delta
- )
+ panic!("Date64Type::add_month_day_nano overflowed for date:
{date}, delta: {delta:?}",)
})
}
@@ -1200,10 +1191,7 @@ impl Date64Type {
delta: <IntervalYearMonthType as ArrowPrimitiveType>::Native,
) -> <Date64Type as ArrowPrimitiveType>::Native {
Self::subtract_year_months_opt(date, delta).unwrap_or_else(|| {
- panic!(
- "Date64Type::subtract_year_months overflowed for date: {},
delta: {}",
- date, delta
- )
+ panic!("Date64Type::subtract_year_months overflowed for date:
{date}, delta: {delta}",)
})
}
@@ -1240,10 +1228,7 @@ impl Date64Type {
delta: <IntervalDayTimeType as ArrowPrimitiveType>::Native,
) -> <Date64Type as ArrowPrimitiveType>::Native {
Self::subtract_day_time_opt(date, delta).unwrap_or_else(|| {
- panic!(
- "Date64Type::subtract_day_time overflowed for date: {}, delta:
{:?}",
- date, delta
- )
+ panic!("Date64Type::subtract_day_time overflowed for date: {date},
delta: {delta:?}",)
})
}
@@ -1282,8 +1267,7 @@ impl Date64Type {
) -> <Date64Type as ArrowPrimitiveType>::Native {
Self::subtract_month_day_nano_opt(date, delta).unwrap_or_else(|| {
panic!(
- "Date64Type::subtract_month_day_nano overflowed for date: {},
delta: {:?}",
- date, delta
+ "Date64Type::subtract_month_day_nano overflowed for date:
{date}, delta: {delta:?}",
)
})
}
diff --git a/arrow-avro/benches/avro_reader.rs b/arrow-avro/benches/avro_reader.rs
index b525a0c788..7b1a5afff8 100644
--- a/arrow-avro/benches/avro_reader.rs
+++ b/arrow-avro/benches/avro_reader.rs
@@ -38,7 +38,7 @@ use tempfile::NamedTempFile;
fn create_test_data(count: usize, str_length: usize) -> Vec<String> {
(0..count)
- .map(|i| format!("str_{}", i) + &"a".repeat(str_length))
+ .map(|i| format!("str_{i}") + &"a".repeat(str_length))
.collect()
}
@@ -101,7 +101,7 @@ fn read_avro_test_file(
reader.read_exact(&mut buf)?;
let s = String::from_utf8(buf)
- .map_err(|e| ArrowError::ParseError(format!("Invalid UTF-8: {}", e)))?;
+ .map_err(|e| ArrowError::ParseError(format!("Invalid UTF-8: {e}")))?;
strings.push(s);
@@ -143,7 +143,7 @@ fn bench_array_creation(c: &mut Criterion) {
let data = create_test_data(10000, str_length);
let row_count = 1000;
- group.bench_function(format!("string_array_{}_chars", str_length), |b|
{
+ group.bench_function(format!("string_array_{str_length}_chars"), |b| {
b.iter(|| {
let string_array =
StringArray::from_iter(data[0..row_count].iter().map(|s| Some(s.as_str())));
@@ -167,7 +167,7 @@ fn bench_array_creation(c: &mut Criterion) {
})
});
- group.bench_function(format!("string_view_{}_chars", str_length), |b| {
+ group.bench_function(format!("string_view_{str_length}_chars"), |b| {
b.iter(|| {
let string_array =
StringViewArray::from_iter(data[0..row_count].iter().map(|s| Some(s.as_str())));
@@ -208,7 +208,7 @@ fn bench_string_operations(c: &mut Criterion) {
let string_view_array =
StringViewArray::from_iter(data[0..rows].iter().map(|s| Some(s.as_str())));
- group.bench_function(format!("string_array_value_{}_chars",
str_length), |b| {
+ group.bench_function(format!("string_array_value_{str_length}_chars"),
|b| {
b.iter(|| {
let mut sum_len = 0;
for i in 0..rows {
@@ -218,7 +218,7 @@ fn bench_string_operations(c: &mut Criterion) {
})
});
- group.bench_function(format!("string_view_value_{}_chars",
str_length), |b| {
+ group.bench_function(format!("string_view_value_{str_length}_chars"),
|b| {
b.iter(|| {
let mut sum_len = 0;
for i in 0..rows {
@@ -242,7 +242,7 @@ fn bench_avro_reader(c: &mut Criterion) {
let temp_file = create_avro_test_file(row_count, str_length).unwrap();
let file_path = temp_file.path();
- group.bench_function(format!("string_array_{}_chars", str_length), |b|
{
+ group.bench_function(format!("string_array_{str_length}_chars"), |b| {
b.iter(|| {
let options = ReadOptions::default();
let batch = read_avro_test_file(file_path, &options).unwrap();
@@ -250,7 +250,7 @@ fn bench_avro_reader(c: &mut Criterion) {
})
});
- group.bench_function(format!("string_view_{}_chars", str_length), |b| {
+ group.bench_function(format!("string_view_{str_length}_chars"), |b| {
b.iter(|| {
let options = ReadOptions::default().with_utf8view(true);
let batch = read_avro_test_file(file_path, &options).unwrap();
diff --git a/arrow-avro/examples/read_with_utf8view.rs b/arrow-avro/examples/read_with_utf8view.rs
index 2fa4782034..d79f8dad56 100644
--- a/arrow-avro/examples/read_with_utf8view.rs
+++ b/arrow-avro/examples/read_with_utf8view.rs
@@ -55,8 +55,8 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let view_duration = start.elapsed();
println!("Read {} rows from {}", batch.num_rows(), file_path);
- println!("Reading with StringArray: {:?}", regular_duration);
- println!("Reading with StringViewArray: {:?}", view_duration);
+ println!("Reading with StringArray: {regular_duration:?}");
+ println!("Reading with StringViewArray: {view_duration:?}");
if regular_duration > view_duration {
println!(
@@ -117,5 +117,5 @@ fn read_avro_with_options(
let int_array: ArrayRef = Arc::new(Int32Array::from(int_data));
RecordBatch::try_new(Arc::new(mock_schema), vec![string_array, int_array])
- .map_err(|e| ArrowError::ComputeError(format!("Failed to create record batch: {}", e)))
+ .map_err(|e| ArrowError::ComputeError(format!("Failed to create record batch: {e}")))
}
diff --git a/arrow-buffer/src/util/bit_mask.rs b/arrow-buffer/src/util/bit_mask.rs
index 0d694d13ec..6030cb4b1b 100644
--- a/arrow-buffer/src/util/bit_mask.rs
+++ b/arrow-buffer/src/util/bit_mask.rs
@@ -278,7 +278,7 @@ mod tests {
impl Display for BinaryFormatter<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for byte in self.0 {
- write!(f, "{:08b} ", byte)?;
+ write!(f, "{byte:08b} ")?;
}
write!(f, " ")?;
Ok(())
@@ -389,8 +389,8 @@ mod tests {
self.len,
);
- assert_eq!(actual, self.expected_data, "self: {}", self);
- assert_eq!(null_count, self.expected_null_count, "self: {}", self);
+ assert_eq!(actual, self.expected_data, "self: {self}");
+ assert_eq!(null_count, self.expected_null_count, "self: {self}");
}
}
diff --git a/arrow-cast/src/cast/decimal.rs b/arrow-cast/src/cast/decimal.rs
index b86d93bc81..57dfc51d74 100644
--- a/arrow-cast/src/cast/decimal.rs
+++ b/arrow-cast/src/cast/decimal.rs
@@ -505,8 +505,7 @@ where
)?,
other => {
return Err(ArrowError::ComputeError(format!(
- "Cannot cast {:?} to decimal",
- other
+ "Cannot cast {other:?} to decimal",
)))
}
};
diff --git a/arrow-cast/src/cast/mod.rs b/arrow-cast/src/cast/mod.rs
index b317dabd5d..884a32197c 100644
--- a/arrow-cast/src/cast/mod.rs
+++ b/arrow-cast/src/cast/mod.rs
@@ -7695,13 +7695,11 @@ mod tests {
);
let list_array = cast(&array, expected.data_type())
- .unwrap_or_else(|_| panic!("Failed to cast {:?} to {:?}",
array, expected));
+ .unwrap_or_else(|_| panic!("Failed to cast {array:?} to
{expected:?}"));
assert_eq!(
list_array.as_ref(),
&expected,
- "Incorrect result from casting {:?} to {:?}",
- array,
- expected
+ "Incorrect result from casting {array:?} to {expected:?}",
);
}
}
@@ -7935,7 +7933,7 @@ mod tests {
},
);
assert!(res.is_err());
- assert!(format!("{:?}", res)
+ assert!(format!("{res:?}")
.contains("Cannot cast to FixedSizeList(3): value at index 1 has
length 2"));
// When safe=true (default), the cast will fill nulls for lists that are
@@ -8026,7 +8024,7 @@ mod tests {
},
);
assert!(res.is_err());
- assert!(format!("{:?}", res).contains("Can't cast value 2147483647 to
type Int16"));
+ assert!(format!("{res:?}").contains("Can't cast value 2147483647 to
type Int16"));
}
#[test]
@@ -9090,7 +9088,7 @@ mod tests {
Some(array.value_as_string(i))
};
let actual = actual.as_ref().map(|s| s.as_ref());
- assert_eq!(*expected, actual, "Expected at position {}", i);
+ assert_eq!(*expected, actual, "Expected at position {i}");
}
}
diff --git a/arrow-cast/src/display.rs b/arrow-cast/src/display.rs
index 6761ac22fa..b466a59c20 100644
--- a/arrow-cast/src/display.rs
+++ b/arrow-cast/src/display.rs
@@ -776,12 +776,12 @@ impl Display for NanosecondsFormatter<'_> {
let nanoseconds = self.nanoseconds % 1_000_000_000;
if hours != 0 {
- write!(f, "{prefix}{} hours", hours)?;
+ write!(f, "{prefix}{hours} hours")?;
prefix = " ";
}
if mins != 0 {
- write!(f, "{prefix}{} mins", mins)?;
+ write!(f, "{prefix}{mins} mins")?;
prefix = " ";
}
@@ -819,12 +819,12 @@ impl Display for MillisecondsFormatter<'_> {
let milliseconds = self.milliseconds % 1_000;
if hours != 0 {
- write!(f, "{prefix}{} hours", hours,)?;
+ write!(f, "{prefix}{hours} hours")?;
prefix = " ";
}
if mins != 0 {
- write!(f, "{prefix}{} mins", mins,)?;
+ write!(f, "{prefix}{mins} mins")?;
prefix = " ";
}
diff --git a/arrow-cast/src/parse.rs b/arrow-cast/src/parse.rs
index 28d36db89a..890719964d 100644
--- a/arrow-cast/src/parse.rs
+++ b/arrow-cast/src/parse.rs
@@ -1235,8 +1235,7 @@ impl Interval {
match (self.months, self.days, self.nanos) {
(months, days, nanos) if days == 0 && nanos == 0 => Ok(months),
_ => Err(ArrowError::InvalidArgumentError(format!(
- "Unable to represent interval with days and nanos as
year-months: {:?}",
- self
+ "Unable to represent interval with days and nanos as
year-months: {self:?}"
))),
}
}
diff --git a/arrow-data/src/transform/run.rs b/arrow-data/src/transform/run.rs
index 0d37a8374c..1ab6d0d319 100644
--- a/arrow-data/src/transform/run.rs
+++ b/arrow-data/src/transform/run.rs
@@ -75,10 +75,7 @@ pub fn extend_nulls(mutable: &mut _MutableArrayData, len: usize) {
DataType::Int16 => extend_nulls_impl!(i16),
DataType::Int32 => extend_nulls_impl!(i32),
DataType::Int64 => extend_nulls_impl!(i64),
- _ => panic!(
- "Invalid run end type for RunEndEncoded array: {:?}",
- run_end_type
- ),
+ _ => panic!("Invalid run end type for RunEndEncoded array:
{run_end_type:?}"),
};
mutable.child_data[0].data.len += 1;
@@ -228,10 +225,7 @@ pub fn build_extend(array: &ArrayData) -> Extend {
DataType::Int16 => build_and_process_impl!(i16),
DataType::Int32 => build_and_process_impl!(i32),
DataType::Int64 => build_and_process_impl!(i64),
- _ => panic!(
- "Invalid run end type for RunEndEncoded array: {:?}",
- dest_run_end_type
- ),
+ _ => panic!("Invalid run end type for RunEndEncoded array:
{dest_run_end_type:?}",),
}
},
)
diff --git a/arrow-flight/examples/flight_sql_server.rs b/arrow-flight/examples/flight_sql_server.rs
index 396b72f4cb..b0dc9b1b74 100644
--- a/arrow-flight/examples/flight_sql_server.rs
+++ b/arrow-flight/examples/flight_sql_server.rs
@@ -189,7 +189,7 @@ impl FlightSqlService for FlightSqlServiceImpl {
let result = Ok(result);
let output = futures::stream::iter(vec![result]);
- let token = format!("Bearer {}", FAKE_TOKEN);
+ let token = format!("Bearer {FAKE_TOKEN}");
let mut response: Response<Pin<Box<dyn Stream<Item = _> + Send>>> =
Response::new(Box::pin(output));
response.metadata_mut().append(
@@ -745,7 +745,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
let addr_str = "0.0.0.0:50051";
let addr = addr_str.parse()?;
- println!("Listening on {:?}", addr);
+ println!("Listening on {addr:?}");
if std::env::var("USE_TLS").ok().is_some() {
let cert = std::fs::read_to_string("arrow-flight/examples/data/server.pem")?;
diff --git a/arrow-flight/src/error.rs b/arrow-flight/src/error.rs
index ac80305832..d5ac568e97 100644
--- a/arrow-flight/src/error.rs
+++ b/arrow-flight/src/error.rs
@@ -51,12 +51,12 @@ impl FlightError {
impl std::fmt::Display for FlightError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
- FlightError::Arrow(source) => write!(f, "Arrow error: {}", source),
- FlightError::NotYetImplemented(desc) => write!(f, "Not yet implemented: {}", desc),
- FlightError::Tonic(source) => write!(f, "Tonic error: {}", source),
- FlightError::ProtocolError(desc) => write!(f, "Protocol error: {}", desc),
- FlightError::DecodeError(desc) => write!(f, "Decode error: {}", desc),
- FlightError::ExternalError(source) => write!(f, "External error: {}", source),
+ FlightError::Arrow(source) => write!(f, "Arrow error: {source}"),
+ FlightError::NotYetImplemented(desc) => write!(f, "Not yet implemented: {desc}"),
+ FlightError::Tonic(source) => write!(f, "Tonic error: {source}"),
+ FlightError::ProtocolError(desc) => write!(f, "Protocol error: {desc}"),
+ FlightError::DecodeError(desc) => write!(f, "Decode error: {desc}"),
+ FlightError::ExternalError(source) => write!(f, "External error: {source}"),
}
}
}
diff --git a/arrow-flight/src/sql/metadata/mod.rs b/arrow-flight/src/sql/metadata/mod.rs
index fd71149a31..66c12fce9a 100644
--- a/arrow-flight/src/sql/metadata/mod.rs
+++ b/arrow-flight/src/sql/metadata/mod.rs
@@ -70,8 +70,7 @@ mod tests {
let actual_lines: Vec<_> = formatted.trim().lines().collect();
assert_eq!(
&actual_lines, expected_lines,
- "\n\nexpected:\n\n{:#?}\nactual:\n\n{:#?}\n\n",
- expected_lines, actual_lines
+ "\n\nexpected:\n\n{expected_lines:#?}\nactual:\n\n{actual_lines:#?}\n\n",
);
}
}
diff --git a/arrow-flight/src/sql/server.rs b/arrow-flight/src/sql/server.rs
index add7c8db40..da5dc9945e 100644
--- a/arrow-flight/src/sql/server.rs
+++ b/arrow-flight/src/sql/server.rs
@@ -392,7 +392,7 @@ pub trait FlightSqlService: Sync + Send + Sized + 'static {
_request: Request<PeekableFlightDataStream>,
error: DoPutError,
) -> Result<Response<<Self as FlightService>::DoPutStream>, Status> {
- Err(Status::unimplemented(format!("Unhandled Error: {}", error)))
+ Err(Status::unimplemented(format!("Unhandled Error: {error}")))
}
/// Execute an update SQL statement.
diff --git a/arrow-integration-testing/src/lib.rs b/arrow-integration-testing/src/lib.rs
index e669690ef4..10512a00eb 100644
--- a/arrow-integration-testing/src/lib.rs
+++ b/arrow-integration-testing/src/lib.rs
@@ -207,8 +207,7 @@ fn cdata_integration_import_schema_and_compare_to_json(
// compare schemas
if canonicalize_schema(&json_schema) !=
canonicalize_schema(&imported_schema) {
return Err(ArrowError::ComputeError(format!(
- "Schemas do not match.\n- JSON: {:?}\n- Imported: {:?}",
- json_schema, imported_schema
+ "Schemas do not match.\n- JSON: {json_schema:?}\n- Imported:
{imported_schema:?}",
)));
}
Ok(())
@@ -253,7 +252,7 @@ fn cdata_integration_import_batch_and_compare_to_json(
fn result_to_c_error<T, E: std::fmt::Display>(result: &std::result::Result<T,
E>) -> *mut c_char {
match result {
Ok(_) => ptr::null_mut(),
- Err(e) => CString::new(format!("{}", e)).unwrap().into_raw(),
+ Err(e) => CString::new(format!("{e}")).unwrap().into_raw(),
}
}
diff --git a/arrow-ipc/src/reader.rs b/arrow-ipc/src/reader.rs
index 7f9b4b2937..919407dcda 100644
--- a/arrow-ipc/src/reader.rs
+++ b/arrow-ipc/src/reader.rs
@@ -544,8 +544,7 @@ impl<'a> RecordBatchDecoder<'a> {
fn next_node(&mut self, field: &Field) -> Result<&'a FieldNode,
ArrowError> {
self.nodes.next().ok_or_else(|| {
ArrowError::SchemaError(format!(
- "Invalid data for schema. {} refers to node not found in
schema",
- field
+ "Invalid data for schema. {field} refers to node not found in
schema",
))
})
}
diff --git a/arrow-ipc/src/reader/stream.rs b/arrow-ipc/src/reader/stream.rs
index f3aab9a82b..e894678142 100644
--- a/arrow-ipc/src/reader/stream.rs
+++ b/arrow-ipc/src/reader/stream.rs
@@ -408,7 +408,7 @@ mod tests {
while let Some(batch) = decoder
.decode(buf)
.map_err(|e| {
- ArrowError::ExternalError(format!("Failed to decode record
batch: {}", e).into())
+ ArrowError::ExternalError(format!("Failed to decode record
batch: {e}").into())
})
.expect("Failed to decode record batch")
{
diff --git a/arrow-ipc/src/writer.rs b/arrow-ipc/src/writer.rs
index c800ddd290..bd255fd2d5 100644
--- a/arrow-ipc/src/writer.rs
+++ b/arrow-ipc/src/writer.rs
@@ -2517,7 +2517,7 @@ mod tests {
let strings: Vec<_> = (0..8000)
.map(|i| {
if i % 2 == 0 {
- Some(format!("value{}", i))
+ Some(format!("value{i}"))
} else {
None
}
@@ -2951,7 +2951,7 @@ mod tests {
let mut fields = Vec::new();
let mut arrays = Vec::new();
for i in 0..num_cols {
- let field = Field::new(format!("col_{}", i),
DataType::Decimal128(38, 10), true);
+ let field = Field::new(format!("col_{i}"),
DataType::Decimal128(38, 10), true);
let array = Decimal128Array::from(vec![num_cols as i128;
num_rows]);
fields.push(field);
arrays.push(Arc::new(array) as Arc<dyn Array>);
@@ -3006,7 +3006,7 @@ mod tests {
let mut fields = Vec::new();
let mut arrays = Vec::new();
for i in 0..num_cols {
- let field = Field::new(format!("col_{}", i),
DataType::Decimal128(38, 10), true);
+ let field = Field::new(format!("col_{i}"),
DataType::Decimal128(38, 10), true);
let array = Decimal128Array::from(vec![num_cols as i128;
num_rows]);
fields.push(field);
arrays.push(Arc::new(array) as Arc<dyn Array>);
@@ -3061,7 +3061,7 @@ mod tests {
let mut fields = Vec::new();
let options = IpcWriteOptions::try_new(8, false, MetadataVersion::V5).unwrap();
for i in 0..num_cols {
- let field = Field::new(format!("col_{}", i),
DataType::Decimal128(38, 10), true);
+ let field = Field::new(format!("col_{i}"),
DataType::Decimal128(38, 10), true);
fields.push(field);
}
let schema = Schema::new(fields);
diff --git a/arrow-json/src/reader/mod.rs b/arrow-json/src/reader/mod.rs
index cd33e337be..af19d05763 100644
--- a/arrow-json/src/reader/mod.rs
+++ b/arrow-json/src/reader/mod.rs
@@ -948,9 +948,7 @@ mod tests {
// (The actual buffer may be larger than expected due to rounding or internal allocation strategies.)
assert!(
data_buffer >= expected_capacity,
- "Data buffer length ({}) should be at least {}",
- data_buffer,
- expected_capacity
+ "Data buffer length ({data_buffer}) should be at least
{expected_capacity}",
);
// Additionally, verify that the decoded values are correct.
@@ -994,9 +992,7 @@ mod tests {
let data_buffer = string_view_array.to_data().buffers()[0].len();
assert!(
data_buffer >= expected_capacity,
- "Data buffer length ({}) should be at least {}",
- data_buffer,
- expected_capacity
+ "Data buffer length ({data_buffer}) should be at least
{expected_capacity}",
);
// Verify that the converted string values are correct.
diff --git a/arrow-json/src/reader/schema.rs b/arrow-json/src/reader/schema.rs
index 07eb40106d..c29a7bbe1a 100644
--- a/arrow-json/src/reader/schema.rs
+++ b/arrow-json/src/reader/schema.rs
@@ -655,8 +655,7 @@ mod tests {
let bigger_than_i64_max = (i64::MAX as i128) + 1;
let smaller_than_i64_min = (i64::MIN as i128) - 1;
let json = format!(
- "{{ \"bigger_than_i64_max\": {}, \"smaller_than_i64_min\": {} }}",
- bigger_than_i64_max, smaller_than_i64_min
+ "{{ \"bigger_than_i64_max\": {bigger_than_i64_max},
\"smaller_than_i64_min\": {smaller_than_i64_min} }}",
);
let mut buf_reader = BufReader::new(json.as_bytes());
let (inferred_schema, _) = infer_json_schema(&mut buf_reader,
Some(1)).unwrap();
diff --git a/arrow-json/src/reader/string_view_array.rs b/arrow-json/src/reader/string_view_array.rs
index 8aeb1c8058..44f7e3fd6a 100644
--- a/arrow-json/src/reader/string_view_array.rs
+++ b/arrow-json/src/reader/string_view_array.rs
@@ -131,26 +131,26 @@ impl ArrayDecoder for StringViewArrayDecoder {
let val = ((high as i64) << 32) | (low as u32) as i64;
tmp_buf.clear();
// Reuse the temporary buffer instead of allocating a new String
- write!(&mut tmp_buf, "{}", val).unwrap();
+ write!(&mut tmp_buf, "{val}").unwrap();
builder.append_value(&tmp_buf);
}
_ => unreachable!(),
},
TapeElement::I32(n) if coerce => {
tmp_buf.clear();
- write!(&mut tmp_buf, "{}", n).unwrap();
+ write!(&mut tmp_buf, "{n}").unwrap();
builder.append_value(&tmp_buf);
}
TapeElement::F32(n) if coerce => {
tmp_buf.clear();
- write!(&mut tmp_buf, "{}", n).unwrap();
+ write!(&mut tmp_buf, "{n}").unwrap();
builder.append_value(&tmp_buf);
}
TapeElement::F64(high) if coerce => match tape.get(p + 1) {
TapeElement::F32(low) => {
let val = f64::from_bits(((high as u64) << 32) | (low
as u64));
tmp_buf.clear();
- write!(&mut tmp_buf, "{}", val).unwrap();
+ write!(&mut tmp_buf, "{val}").unwrap();
builder.append_value(&tmp_buf);
}
_ => unreachable!(),
diff --git a/arrow-json/src/reader/struct_array.rs b/arrow-json/src/reader/struct_array.rs
index b9408df77a..f81a40c71e 100644
--- a/arrow-json/src/reader/struct_array.rs
+++ b/arrow-json/src/reader/struct_array.rs
@@ -106,8 +106,7 @@ impl ArrayDecoder for StructArrayDecoder {
None => {
if self.strict_mode {
return Err(ArrowError::JsonError(format!(
- "column '{}' missing from schema",
- field_name
+ "column '{field_name}' missing from
schema",
)));
}
}
diff --git a/arrow-json/src/writer/encoder.rs b/arrow-json/src/writer/encoder.rs
index d9481cc484..de2e146702 100644
--- a/arrow-json/src/writer/encoder.rs
+++ b/arrow-json/src/writer/encoder.rs
@@ -356,8 +356,7 @@ pub fn make_encoder<'a>(
NullableEncoder::new(Box::new(formatter) as Box<dyn Encoder + 'a>, nulls)
}
false => return Err(ArrowError::JsonError(format!(
- "Unsupported data type for JSON encoding: {:?}",
- d
+ "Unsupported data type for JSON encoding: {d:?}",
)))
}
};
diff --git a/arrow-json/src/writer/mod.rs b/arrow-json/src/writer/mod.rs
index ee1b5fabe5..549fe77dfe 100644
--- a/arrow-json/src/writer/mod.rs
+++ b/arrow-json/src/writer/mod.rs
@@ -2078,7 +2078,7 @@ mod tests {
None => out.extend_from_slice(b"null"),
Some(UnionValue::Int32(v)) =>
out.extend_from_slice(v.to_string().as_bytes()),
Some(UnionValue::String(v)) => {
- out.extend_from_slice(format!("\"{}\"", v).as_bytes())
+ out.extend_from_slice(format!("\"{v}\"").as_bytes())
}
}
}
diff --git a/arrow-pyarrow/src/lib.rs b/arrow-pyarrow/src/lib.rs
index 566aa7402c..c958da9d1c 100644
--- a/arrow-pyarrow/src/lib.rs
+++ b/arrow-pyarrow/src/lib.rs
@@ -122,8 +122,7 @@ fn validate_class(expected: &str, value: &Bound<PyAny>) -> PyResult<()> {
.extract::<PyBackedStr>()?;
let found_name = found_class.getattr("__name__")?.extract::<PyBackedStr>()?;
return Err(PyTypeError::new_err(format!(
- "Expected instance of {}.{}, got {}.{}",
- expected_module, expected_name, found_module, found_name
+ "Expected instance of {expected_module}.{expected_name}, got
{found_module}.{found_name}",
)));
}
Ok(())
@@ -140,8 +139,7 @@ fn validate_pycapsule(capsule: &Bound<PyCapsule>, name: &str) -> PyResult<()> {
let capsule_name = capsule_name.unwrap().to_str()?;
if capsule_name != name {
return Err(PyValueError::new_err(format!(
- "Expected name '{}' in PyCapsule, instead got '{}'",
- name, capsule_name
+ "Expected name '{name}' in PyCapsule, instead got
'{capsule_name}'",
)));
}
diff --git a/arrow-pyarrow/tests/pyarrow.rs b/arrow-pyarrow/tests/pyarrow.rs
index 8ed21f5d8a..12e2f97abf 100644
--- a/arrow-pyarrow/tests/pyarrow.rs
+++ b/arrow-pyarrow/tests/pyarrow.rs
@@ -32,7 +32,7 @@ fn test_to_pyarrow() {
// The "very long string" will not be inlined, and force the creation of a
data buffer.
let c: ArrayRef = Arc::new(StringViewArray::from(vec!["short", "a very
long string"]));
let input = RecordBatch::try_from_iter(vec![("a", a), ("b", b), ("c",
c)]).unwrap();
- println!("input: {:?}", input);
+ println!("input: {input:?}");
let res = Python::with_gil(|py| {
let py_input = input.to_pyarrow(py)?;
@@ -59,7 +59,7 @@ fn test_to_pyarrow_byte_view() {
])
.unwrap();
- println!("input: {:?}", input);
+ println!("input: {input:?}");
let res = Python::with_gil(|py| {
let py_input = input.to_pyarrow(py)?;
let records = RecordBatch::from_pyarrow_bound(py_input.bind(py))?;
diff --git a/arrow-row/src/lib.rs b/arrow-row/src/lib.rs
index 81320420db..ee1c117859 100644
--- a/arrow-row/src/lib.rs
+++ b/arrow-row/src/lib.rs
@@ -1610,7 +1610,7 @@ unsafe fn decode_column(
DataType::Utf8 => Arc::new(decode_string::<i32>(rows, options,
validate_utf8)),
DataType::LargeUtf8 => Arc::new(decode_string::<i64>(rows,
options, validate_utf8)),
DataType::Utf8View => Arc::new(decode_string_view(rows,
options, validate_utf8)),
- _ => return
Err(ArrowError::NotYetImplemented(format!("unsupported data type: {}",
data_type)))
+ _ => return
Err(ArrowError::NotYetImplemented(format!("unsupported data type: {data_type}"
)))
}
}
Codec::Dictionary(converter, _) => {
@@ -2848,8 +2848,7 @@ mod tests {
for (i, (actual, expected)) in rows.iter().zip(rows_expected.iter()).enumerate() {
assert_eq!(
actual, expected,
- "For row {}: expected {:?}, actual: {:?}",
- i, expected, actual
+ "For row {i}: expected {expected:?}, actual: {actual:?}",
);
}
}
diff --git a/arrow-row/src/list.rs b/arrow-row/src/list.rs
index 627214dc9c..58fbc71caa 100644
--- a/arrow-row/src/list.rs
+++ b/arrow-row/src/list.rs
@@ -261,8 +261,7 @@ pub unsafe fn decode_fixed_size_list(
DataType::FixedSizeList(element_field, _) => element_field.data_type(),
_ => {
return Err(ArrowError::InvalidArgumentError(format!(
- "Expected FixedSizeListArray, found: {:?}",
- list_type
+ "Expected FixedSizeListArray, found: {list_type:?}",
)))
}
};
diff --git a/arrow-select/src/coalesce.rs b/arrow-select/src/coalesce.rs
index ce436f396f..285f6633c0 100644
--- a/arrow-select/src/coalesce.rs
+++ b/arrow-select/src/coalesce.rs
@@ -1000,7 +1000,7 @@ mod tests {
if i % 3 == 0 {
None
} else {
- Some(format!("value{}", i))
+ Some(format!("value{i}"))
}
}));
diff --git a/parquet-variant/src/to_json.rs b/parquet-variant/src/to_json.rs
index 09efe20a7a..6fcf303ebc 100644
--- a/parquet-variant/src/to_json.rs
+++ b/parquet-variant/src/to_json.rs
@@ -41,7 +41,6 @@ fn format_binary_base64(bytes: &[u8]) -> String {
general_purpose::STANDARD.encode(bytes)
}
-/// Converts a Variant to JSON and writes it to the provided `Write`
///
/// This function writes JSON directly to any type that implements [`Write`],
/// making it efficient for streaming or when you want to control the output destination.
diff --git a/parquet/benches/arrow_reader_clickbench.rs b/parquet/benches/arrow_reader_clickbench.rs
index 38d5ed9bb8..243f3208ea 100644
--- a/parquet/benches/arrow_reader_clickbench.rs
+++ b/parquet/benches/arrow_reader_clickbench.rs
@@ -580,14 +580,13 @@ fn hits_1() -> &'static Path {
let current_dir = std::env::current_dir().expect("Failed to get current
directory");
println!(
- "Looking for ClickBench files starting in current_dir and all parent
directories: {:?}",
- current_dir
+ "Looking for ClickBench files starting in current_dir and all parent
directories: {current_dir:?}"
+
);
let Some(hits_1_path) = find_file_if_exists(current_dir.clone(), "hits_1.parquet") else {
eprintln!(
- "Could not find hits_1.parquet in directory or parents: {:?}.
Download it via",
- current_dir
+ "Could not find hits_1.parquet in directory or parents:
{current_dir:?}. Download it via",
);
eprintln!();
eprintln!("wget --continue
https://datasets.clickhouse.com/hits_compatible/athena_partitioned/hits_1.parquet");
diff --git a/parquet/benches/arrow_reader_row_filter.rs b/parquet/benches/arrow_reader_row_filter.rs
index 2e44e5aea0..33427a37b5 100644
--- a/parquet/benches/arrow_reader_row_filter.rs
+++ b/parquet/benches/arrow_reader_row_filter.rs
@@ -341,7 +341,7 @@ impl std::fmt::Display for FilterType {
FilterType::Composite => "float64 > 99.0 AND ts >= 9000",
FilterType::Utf8ViewNonEmpty => "utf8View <> ''",
};
- write!(f, "{}", s)
+ write!(f, "{s}")
}
}
diff --git a/parquet/benches/encoding.rs b/parquet/benches/encoding.rs
index 68f215d4ea..baff329583 100644
--- a/parquet/benches/encoding.rs
+++ b/parquet/benches/encoding.rs
@@ -52,7 +52,7 @@ fn bench_typed<T: DataType>(
0,
ColumnPath::new(vec![]),
));
- c.bench_function(&format!("encoding: {}", name), |b| {
+ c.bench_function(&format!("encoding: {name}"), |b| {
b.iter(|| {
let mut encoder = get_encoder::<T>(encoding,
&column_desc_ptr).unwrap();
encoder.put(values).unwrap();
@@ -66,7 +66,7 @@ fn bench_typed<T: DataType>(
println!("{} encoded as {} bytes", name, encoded.len(),);
let mut buffer = vec![T::T::default(); values.len()];
- c.bench_function(&format!("decoding: {}", name), |b| {
+ c.bench_function(&format!("decoding: {name}"), |b| {
b.iter(|| {
let mut decoder: Box<dyn Decoder<T>> =
get_decoder(column_desc_ptr.clone(), encoding).unwrap();
diff --git a/parquet/examples/external_metadata.rs b/parquet/examples/external_metadata.rs
index 2c3250782c..2710251e55 100644
--- a/parquet/examples/external_metadata.rs
+++ b/parquet/examples/external_metadata.rs
@@ -140,7 +140,7 @@ fn prepare_metadata(metadata: ParquetMetaData) ->
ParquetMetaData {
// verifiy that the size has indeed been reduced
let new_size = metadata.memory_size();
assert!(new_size < orig_size, "metadata size did not decrease");
- println!("Reduced metadata size from {} to {}", orig_size, new_size);
+ println!("Reduced metadata size from {orig_size} to {new_size}");
metadata
}
diff --git a/parquet/src/arrow/async_writer/mod.rs b/parquet/src/arrow/async_writer/mod.rs
index 27bd2bf816..faec427907 100644
--- a/parquet/src/arrow/async_writer/mod.rs
+++ b/parquet/src/arrow/async_writer/mod.rs
@@ -305,7 +305,7 @@ mod tests {
fn get_test_reader() -> ParquetRecordBatchReader {
let testdata = arrow::util::test_util::parquet_test_data();
// This test file is large enough to generate multiple row groups.
- let path = format!("{}/alltypes_tiny_pages_plain.parquet", testdata);
+ let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet");
let original_data = Bytes::from(std::fs::read(path).unwrap());
ParquetRecordBatchReaderBuilder::try_new(original_data)
.unwrap()
diff --git a/parquet/src/arrow/buffer/view_buffer.rs b/parquet/src/arrow/buffer/view_buffer.rs
index fd7d6c213f..97db778e47 100644
--- a/parquet/src/arrow/buffer/view_buffer.rs
+++ b/parquet/src/arrow/buffer/view_buffer.rs
@@ -91,7 +91,7 @@ impl ViewBuffer {
let array = unsafe { builder.build_unchecked() };
make_array(array)
}
- _ => panic!("Unsupported data type: {:?}", data_type),
+ _ => panic!("Unsupported data type: {data_type:?}"),
}
}
}
diff --git a/parquet/src/basic.rs b/parquet/src/basic.rs
index 99f122fe4c..700bba1c63 100644
--- a/parquet/src/basic.rs
+++ b/parquet/src/basic.rs
@@ -404,7 +404,7 @@ impl Compression {
/// Returns the codec type of this compression setting as a string, without the compression
/// level.
pub(crate) fn codec_to_string(self) -> String {
- format!("{:?}", self).split('(').next().unwrap().to_owned()
+ format!("{self:?}").split('(').next().unwrap().to_owned()
}
}
@@ -416,7 +416,7 @@ fn split_compression_string(str_setting: &str) -> Result<(&str, Option<u32>), Pa
let level = &level_str[..level_str.len() - 1]
.parse::<u32>()
.map_err(|_| {
- ParquetError::General(format!("invalid compression level:
{}", level_str))
+ ParquetError::General(format!("invalid compression level:
{level_str}"))
})?;
Ok((codec, Some(*level)))
}
@@ -436,8 +436,7 @@ fn check_level_is_none(level: &Option<u32>) -> Result<(), ParquetError> {
fn require_level(codec: &str, level: Option<u32>) -> Result<u32, ParquetError> {
level.ok_or(ParquetError::General(format!(
- "{} requires a compression level",
- codec
+ "{codec} requires a compression level",
)))
}
@@ -2359,7 +2358,7 @@ mod tests {
// test unknown string
match "plain_xxx".parse::<Encoding>() {
Ok(e) => {
- panic!("Should not be able to parse {:?}", e);
+ panic!("Should not be able to parse {e:?}");
}
Err(e) => {
assert_eq!(e.to_string(), "Parquet error: unknown encoding:
plain_xxx");
diff --git a/parquet/src/bin/parquet-show-bloom-filter.rs b/parquet/src/bin/parquet-show-bloom-filter.rs
index 41e3ac9b52..aa072c160b 100644
--- a/parquet/src/bin/parquet-show-bloom-filter.rs
+++ b/parquet/src/bin/parquet-show-bloom-filter.rs
@@ -128,13 +128,13 @@ fn check_filter(sbbf: &Sbbf, value: &String, column: &ColumnChunkMetaData) -> Re
Type::INT32 => {
let value: i32 = value
.parse()
- .map_err(|e| format!("Unable to parse value '{}' to i32: {}",
value, e))?;
+ .map_err(|e| format!("Unable to parse value '{value}' to i32:
{e}"))?;
Ok(sbbf.check(&value))
}
Type::INT64 => {
let value: i64 = value
.parse()
- .map_err(|e| format!("Unable to parse value '{}' to i64: {}",
value, e))?;
+ .map_err(|e| format!("Unable to parse value '{value}' to i64:
{e}"))?;
Ok(sbbf.check(&value))
}
Type::BYTE_ARRAY => Ok(sbbf.check(&value.as_str())),
diff --git a/parquet/src/encryption/ciphers.rs b/parquet/src/encryption/ciphers.rs
index 5764694675..a211616507 100644
--- a/parquet/src/encryption/ciphers.rs
+++ b/parquet/src/encryption/ciphers.rs
@@ -155,7 +155,7 @@ impl BlockEncryptor for RingGcmBlockEncryptor {
// Format is: [ciphertext size, nonce, ciphertext, authentication tag]
let ciphertext_length: u32 = (NONCE_LEN + plaintext.len() + TAG_LEN)
.try_into()
- .map_err(|err| General(format!("Plaintext data too long. {:?}",
err)))?;
+ .map_err(|err| General(format!("Plaintext data too long.
{err:?}")))?;
// Not checking for overflow here because we've already checked for it
with ciphertext_length
let mut ciphertext = Vec::with_capacity(SIZE_LEN + ciphertext_length
as usize);
ciphertext.extend((ciphertext_length).to_le_bytes());
diff --git a/parquet/src/file/properties.rs b/parquet/src/file/properties.rs
index 88425fd2b5..a84d58bcce 100644
--- a/parquet/src/file/properties.rs
+++ b/parquet/src/file/properties.rs
@@ -93,7 +93,7 @@ impl FromStr for WriterVersion {
match s {
"PARQUET_1_0" | "parquet_1_0" => Ok(WriterVersion::PARQUET_1_0),
"PARQUET_2_0" | "parquet_2_0" => Ok(WriterVersion::PARQUET_2_0),
- _ => Err(format!("Invalid writer version: {}", s)),
+ _ => Err(format!("Invalid writer version: {s}")),
}
}
}
@@ -949,7 +949,7 @@ impl FromStr for EnabledStatistics {
"NONE" | "none" => Ok(EnabledStatistics::None),
"CHUNK" | "chunk" => Ok(EnabledStatistics::Chunk),
"PAGE" | "page" => Ok(EnabledStatistics::Page),
- _ => Err(format!("Invalid statistics arg: {}", s)),
+ _ => Err(format!("Invalid statistics arg: {s}")),
}
}
}
diff --git a/parquet/src/file/statistics.rs b/parquet/src/file/statistics.rs
index b7522a76f0..9087ea1765 100644
--- a/parquet/src/file/statistics.rs
+++ b/parquet/src/file/statistics.rs
@@ -133,8 +133,7 @@ pub fn from_thrift(
if null_count < 0 {
return Err(ParquetError::General(format!(
- "Statistics null count is negative {}",
- null_count
+ "Statistics null count is negative {null_count}",
)));
}
diff --git a/parquet/src/schema/printer.rs b/parquet/src/schema/printer.rs
index 44c742fca6..f9e06413e9 100644
--- a/parquet/src/schema/printer.rs
+++ b/parquet/src/schema/printer.rs
@@ -391,7 +391,7 @@ impl Printer<'_> {
scale,
);
if !logical_type_str.is_empty() {
- write!(self.output, " ({});", logical_type_str);
+ write!(self.output, " ({logical_type_str});");
} else {
write!(self.output, ";");
}
diff --git a/parquet/src/thrift.rs b/parquet/src/thrift.rs
index bf8a2926aa..1cbd47a900 100644
--- a/parquet/src/thrift.rs
+++ b/parquet/src/thrift.rs
@@ -202,7 +202,7 @@ impl TInputProtocol for TCompactSliceInputProtocol<'_> {
0x00 | 0x02 => Ok(false),
unkn => Err(thrift::Error::Protocol(thrift::ProtocolError {
kind: thrift::ProtocolErrorKind::InvalidData,
- message: format!("cannot convert {} into bool", unkn),
+ message: format!("cannot convert {unkn} into bool"),
})),
}
}
@@ -303,7 +303,7 @@ fn u8_to_type(b: u8) -> thrift::Result<TType> {
0x0C => Ok(TType::Struct),
unkn => Err(thrift::Error::Protocol(thrift::ProtocolError {
kind: thrift::ProtocolErrorKind::InvalidData,
- message: format!("cannot convert {} into TType", unkn),
+ message: format!("cannot convert {unkn} into TType"),
})),
}
}
diff --git a/parquet/tests/arrow_reader/bad_data.rs b/parquet/tests/arrow_reader/bad_data.rs
index b427bd4302..ba50e738f6 100644
--- a/parquet/tests/arrow_reader/bad_data.rs
+++ b/parquet/tests/arrow_reader/bad_data.rs
@@ -135,7 +135,7 @@ fn test_arrow_rs_gh_45185_dict_levels() {
/// Returns an error if the file is invalid
fn read_file(name: &str) -> Result<usize, ParquetError> {
let path = bad_data_dir().join(name);
- println!("Reading file: {:?}", path);
+ println!("Reading file: {path:?}");
let file = std::fs::File::open(&path).unwrap();
let reader = ArrowReaderBuilder::try_new(file)?.build()?;
diff --git a/parquet/tests/arrow_reader/checksum.rs b/parquet/tests/arrow_reader/checksum.rs
index b500b7cb1d..1a37289925 100644
--- a/parquet/tests/arrow_reader/checksum.rs
+++ b/parquet/tests/arrow_reader/checksum.rs
@@ -63,7 +63,7 @@ fn test_rle_dict_snappy_checksum() {
/// The record batch data is replaced with () and errors are stringified.
fn read_file_batch_errors(name: &str) -> Vec<Result<(), String>> {
let path = PathBuf::from(parquet_test_data()).join(name);
- println!("Reading file: {:?}", path);
+ println!("Reading file: {path:?}");
let file = std::fs::File::open(&path).unwrap();
let reader = ArrowReaderBuilder::try_new(file).unwrap().build().unwrap();
reader
diff --git a/parquet/tests/encryption/encryption_util.rs b/parquet/tests/encryption/encryption_util.rs
index 382193d258..5e962fe075 100644
--- a/parquet/tests/encryption/encryption_util.rs
+++ b/parquet/tests/encryption/encryption_util.rs
@@ -115,7 +115,7 @@ pub fn verify_column_indexes(metadata: &ParquetMetaData) {
.is_some_and(|max| (max - 53.9).abs() < 1e-6));
}
_ => {
- panic!("Expected a float column index for column {}",
float_col_idx);
+ panic!("Expected a float column index for column {float_col_idx}");
}
};
}
@@ -145,14 +145,13 @@ impl TestKeyRetriever {
impl KeyRetriever for TestKeyRetriever {
fn retrieve_key(&self, key_metadata: &[u8]) -> Result<Vec<u8>> {
let key_metadata = std::str::from_utf8(key_metadata).map_err(|e| {
- ParquetError::General(format!("Could not convert key metadata to
string: {}", e))
+ ParquetError::General(format!("Could not convert key metadata to
string: {e}"))
})?;
let keys = self.keys.lock().unwrap();
match keys.get(key_metadata) {
Some(key) => Ok(key.clone()),
None => Err(ParquetError::General(format!(
- "Could not retrieve key for metadata {:?}",
- key_metadata
+ "Could not retrieve key for metadata {key_metadata:?}"
))),
}
}