This is an automated email from the ASF dual-hosted git repository.

tustvold pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git


The following commit(s) were added to refs/heads/master by this push:
     new aad42b52f85 Remove unused import detected by nightly rust (#5477)
aad42b52f85 is described below

commit aad42b52f853d0b4085a2131a4eb70f1b5029054
Author: Xiangpeng Hao <[email protected]>
AuthorDate: Sun Mar 10 21:27:43 2024 -0500

    Remove unused import detected by nightly rust (#5477)
    
    * remove unused import
    
    * remove more unused code
    
    * suppress warning on make_fixed_len_byte_array_reader
    
    * update fmt
    
    * Update parquet/src/arrow/array_reader/mod.rs
    
    Co-authored-by: Raphael Taylor-Davies 
<[email protected]>
    
    * add back the lz4_codec
    
    ---------
    
    Co-authored-by: Raphael Taylor-Davies 
<[email protected]>
---
 arrow-arith/src/aggregate.rs                       |  2 --
 arrow-array/src/array/dictionary_array.rs          |  6 +---
 arrow-array/src/array/fixed_size_binary_array.rs   |  1 -
 arrow-array/src/array/map_array.rs                 |  1 -
 arrow-array/src/array/primitive_array.rs           |  3 +-
 arrow-array/src/array/run_array.rs                 |  6 ++--
 arrow-array/src/array/struct_array.rs              |  1 -
 arrow-array/src/array/union_array.rs               |  1 -
 arrow-array/src/builder/boolean_builder.rs         |  1 -
 arrow-array/src/builder/buffer_builder.rs          |  1 -
 .../src/builder/fixed_size_binary_builder.rs       |  2 --
 arrow-array/src/builder/generic_bytes_builder.rs   |  2 +-
 .../builder/generic_bytes_dictionary_builder.rs    |  1 -
 arrow-array/src/builder/generic_list_builder.rs    |  2 +-
 arrow-array/src/builder/primitive_builder.rs       |  4 +--
 .../src/builder/primitive_dictionary_builder.rs    |  1 -
 arrow-array/src/builder/struct_builder.rs          |  3 +-
 arrow-array/src/timezone.rs                        |  2 +-
 arrow-buffer/src/bigint/mod.rs                     |  3 +-
 arrow-buffer/src/buffer/immutable.rs               |  1 -
 arrow-cast/src/base64.rs                           |  1 -
 arrow-cast/src/parse.rs                            |  3 +-
 arrow-data/src/data.rs                             |  1 -
 arrow-flight/src/decode.rs                         |  2 +-
 arrow-flight/src/lib.rs                            |  6 +---
 arrow-integration-test/src/lib.rs                  |  5 ---
 arrow-integration-test/src/schema.rs               |  2 +-
 .../flight_server_scenarios/integration_test.rs    |  2 --
 arrow-ipc/src/reader.rs                            |  1 -
 arrow-ipc/src/writer.rs                            |  2 --
 arrow-json/src/lib.rs                              |  5 +--
 arrow-json/src/reader/mod.rs                       | 12 ++-----
 arrow-ord/src/ord.rs                               |  2 --
 arrow-ord/src/sort.rs                              |  1 -
 arrow-row/src/lib.rs                               |  4 +--
 arrow-schema/src/datatype.rs                       |  1 -
 arrow-schema/src/field.rs                          |  3 --
 arrow-select/src/concat.rs                         |  2 --
 arrow-select/src/interleave.rs                     |  4 ---
 arrow-select/src/nullif.rs                         |  2 +-
 arrow-string/src/concat_elements.rs                |  1 -
 arrow-string/src/length.rs                         |  3 +-
 arrow-string/src/regexp.rs                         |  1 -
 arrow/benches/array_from_vec.rs                    |  2 +-
 arrow/benches/cast_kernels.rs                      |  1 -
 arrow/benches/comparison_kernels.rs                |  1 -
 arrow/benches/csv_reader.rs                        |  1 -
 arrow/benches/csv_writer.rs                        |  1 -
 arrow/benches/filter_kernels.rs                    |  1 -
 arrow/benches/partition_kernels.rs                 |  2 +-
 arrow/examples/dynamic_types.rs                    |  1 -
 arrow/src/array/ffi.rs                             |  3 --
 arrow/src/ffi.rs                                   |  7 ----
 arrow/src/ffi_stream.rs                            |  5 +--
 arrow/src/tensor.rs                                |  1 -
 arrow/src/util/data_gen.rs                         |  6 ++--
 arrow/src/util/test_util.rs                        |  1 -
 parquet/src/arrow/array_reader/mod.rs              |  1 +
 parquet/src/arrow/arrow_reader/mod.rs              |  1 -
 parquet/src/arrow/arrow_writer/levels.rs           |  3 --
 parquet/src/arrow/arrow_writer/mod.rs              |  5 +--
 parquet/src/arrow/buffer/dictionary_buffer.rs      |  2 +-
 parquet/src/bloom_filter/mod.rs                    |  3 --
 parquet/src/column/writer/mod.rs                   | 10 ++----
 parquet/src/compression.rs                         | 42 +++++++---------------
 parquet/src/data_type.rs                           |  1 -
 parquet/src/file/metadata.rs                       |  2 +-
 parquet/src/file/page_encoding_stats.rs            |  1 -
 parquet/src/file/reader.rs                         |  2 +-
 parquet/src/file/serialized_reader.rs              |  5 +--
 parquet/src/file/writer.rs                         |  1 -
 parquet/src/record/reader.rs                       |  6 ++--
 parquet/src/schema/printer.rs                      |  4 +--
 parquet/src/schema/types.rs                        |  2 +-
 parquet_derive/src/parquet_field.rs                |  2 +-
 75 files changed, 52 insertions(+), 184 deletions(-)

diff --git a/arrow-arith/src/aggregate.rs b/arrow-arith/src/aggregate.rs
index 20ff0711d73..190685ff9df 100644
--- a/arrow-arith/src/aggregate.rs
+++ b/arrow-arith/src/aggregate.rs
@@ -22,7 +22,6 @@ use arrow_array::iterator::ArrayIter;
 use arrow_array::*;
 use arrow_buffer::{ArrowNativeType, NullBuffer};
 use arrow_data::bit_iterator::try_for_each_valid_idx;
-use arrow_schema::ArrowError;
 use arrow_schema::*;
 use std::borrow::BorrowMut;
 use std::ops::{BitAnd, BitOr, BitXor};
@@ -729,7 +728,6 @@ where
 mod tests {
     use super::*;
     use arrow_array::types::*;
-    use arrow_buffer::NullBuffer;
     use std::sync::Arc;
 
     #[test]
diff --git a/arrow-array/src/array/dictionary_array.rs 
b/arrow-array/src/array/dictionary_array.rs
index 1f4d83b1c5d..abfae2112aa 100644
--- a/arrow-array/src/array/dictionary_array.rs
+++ b/arrow-array/src/array/dictionary_array.rs
@@ -20,8 +20,7 @@ use crate::cast::AsArray;
 use crate::iterator::ArrayIter;
 use crate::types::*;
 use crate::{
-    make_array, Array, ArrayAccessor, ArrayRef, ArrowNativeTypeOp, 
ArrowPrimitiveType,
-    PrimitiveArray, StringArray,
+    make_array, Array, ArrayAccessor, ArrayRef, ArrowNativeTypeOp, 
PrimitiveArray, StringArray,
 };
 use arrow_buffer::bit_util::set_bit;
 use arrow_buffer::buffer::NullBuffer;
@@ -1007,12 +1006,9 @@ impl<K: ArrowDictionaryKeyType> AnyDictionaryArray for 
DictionaryArray<K> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::builder::PrimitiveDictionaryBuilder;
     use crate::cast::as_dictionary_array;
-    use crate::types::{Int32Type, Int8Type, UInt32Type, UInt8Type};
     use crate::{Int16Array, Int32Array, Int8Array};
     use arrow_buffer::{Buffer, ToByteSlice};
-    use std::sync::Arc;
 
     #[test]
     fn test_dictionary_array() {
diff --git a/arrow-array/src/array/fixed_size_binary_array.rs 
b/arrow-array/src/array/fixed_size_binary_array.rs
index d89bbd5ad08..49cecb50e2d 100644
--- a/arrow-array/src/array/fixed_size_binary_array.rs
+++ b/arrow-array/src/array/fixed_size_binary_array.rs
@@ -636,7 +636,6 @@ impl<'a> IntoIterator for &'a FixedSizeBinaryArray {
 mod tests {
     use crate::RecordBatch;
     use arrow_schema::{Field, Schema};
-    use std::sync::Arc;
 
     use super::*;
 
diff --git a/arrow-array/src/array/map_array.rs 
b/arrow-array/src/array/map_array.rs
index bde7fdd5a95..ed22ba1d8a3 100644
--- a/arrow-array/src/array/map_array.rs
+++ b/arrow-array/src/array/map_array.rs
@@ -441,7 +441,6 @@ mod tests {
     use crate::types::UInt32Type;
     use crate::{Int32Array, UInt32Array};
     use arrow_schema::Fields;
-    use std::sync::Arc;
 
     use super::*;
 
diff --git a/arrow-array/src/array/primitive_array.rs 
b/arrow-array/src/array/primitive_array.rs
index 3ae6f59b59a..ddae770d383 100644
--- a/arrow-array/src/array/primitive_array.rs
+++ b/arrow-array/src/array/primitive_array.rs
@@ -1501,9 +1501,8 @@ mod tests {
     use super::*;
     use crate::builder::{Decimal128Builder, Decimal256Builder};
     use crate::cast::downcast_array;
-    use crate::{ArrayRef, BooleanArray};
+    use crate::BooleanArray;
     use arrow_schema::TimeUnit;
-    use std::sync::Arc;
 
     #[test]
     fn test_primitive_array_from_vec() {
diff --git a/arrow-array/src/array/run_array.rs 
b/arrow-array/src/array/run_array.rs
index 4877f9f850a..aa8bb259a0e 100644
--- a/arrow-array/src/array/run_array.rs
+++ b/arrow-array/src/array/run_array.rs
@@ -654,8 +654,6 @@ where
 
 #[cfg(test)]
 mod tests {
-    use std::sync::Arc;
-
     use rand::seq::SliceRandom;
     use rand::thread_rng;
     use rand::Rng;
@@ -663,8 +661,8 @@ mod tests {
     use super::*;
     use crate::builder::PrimitiveRunBuilder;
     use crate::cast::AsArray;
-    use crate::types::{Int16Type, Int32Type, Int8Type, UInt32Type};
-    use crate::{Array, Int32Array, StringArray};
+    use crate::types::{Int8Type, UInt32Type};
+    use crate::{Int32Array, StringArray};
 
     fn build_input_array(size: usize) -> Vec<Option<i32>> {
         // The input array is created by shuffling and repeating
diff --git a/arrow-array/src/array/struct_array.rs 
b/arrow-array/src/array/struct_array.rs
index 699da28cf7a..ae292944e37 100644
--- a/arrow-array/src/array/struct_array.rs
+++ b/arrow-array/src/array/struct_array.rs
@@ -464,7 +464,6 @@ mod tests {
 
     use crate::{BooleanArray, Float32Array, Float64Array, Int32Array, 
Int64Array, StringArray};
     use arrow_buffer::ToByteSlice;
-    use std::sync::Arc;
 
     #[test]
     fn test_struct_array_builder() {
diff --git a/arrow-array/src/array/union_array.rs 
b/arrow-array/src/array/union_array.rs
index 63e927fd08a..e3e63724753 100644
--- a/arrow-array/src/array/union_array.rs
+++ b/arrow-array/src/array/union_array.rs
@@ -511,7 +511,6 @@ mod tests {
     use crate::RecordBatch;
     use crate::{Float64Array, Int32Array, Int64Array, StringArray};
     use arrow_schema::Schema;
-    use std::sync::Arc;
 
     #[test]
     fn test_dense_i32() {
diff --git a/arrow-array/src/builder/boolean_builder.rs 
b/arrow-array/src/builder/boolean_builder.rs
index 7e59d940a50..a4bda89d52e 100644
--- a/arrow-array/src/builder/boolean_builder.rs
+++ b/arrow-array/src/builder/boolean_builder.rs
@@ -217,7 +217,6 @@ impl Extend<Option<bool>> for BooleanBuilder {
 mod tests {
     use super::*;
     use crate::Array;
-    use arrow_buffer::Buffer;
 
     #[test]
     fn test_boolean_array_builder() {
diff --git a/arrow-array/src/builder/buffer_builder.rs 
b/arrow-array/src/builder/buffer_builder.rs
index 2b66a8187fa..ab67669febb 100644
--- a/arrow-array/src/builder/buffer_builder.rs
+++ b/arrow-array/src/builder/buffer_builder.rs
@@ -15,7 +15,6 @@
 // specific language governing permissions and limitations
 // under the License.
 
-use crate::array::ArrowPrimitiveType;
 pub use arrow_buffer::BufferBuilder;
 use half::f16;
 
diff --git a/arrow-array/src/builder/fixed_size_binary_builder.rs 
b/arrow-array/src/builder/fixed_size_binary_builder.rs
index 0a50eb8a50e..132c2e1939b 100644
--- a/arrow-array/src/builder/fixed_size_binary_builder.rs
+++ b/arrow-array/src/builder/fixed_size_binary_builder.rs
@@ -154,8 +154,6 @@ mod tests {
     use super::*;
 
     use crate::Array;
-    use crate::FixedSizeBinaryArray;
-    use arrow_schema::DataType;
 
     #[test]
     fn test_fixed_size_binary_builder() {
diff --git a/arrow-array/src/builder/generic_bytes_builder.rs 
b/arrow-array/src/builder/generic_bytes_builder.rs
index 2c7ee7a3e44..9939a85f940 100644
--- a/arrow-array/src/builder/generic_bytes_builder.rs
+++ b/arrow-array/src/builder/generic_bytes_builder.rs
@@ -262,7 +262,7 @@ pub type GenericBinaryBuilder<O> = 
GenericByteBuilder<GenericBinaryType<O>>;
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::array::{Array, OffsetSizeTrait};
+    use crate::array::Array;
     use crate::GenericStringArray;
 
     fn _test_generic_binary_builder<O: OffsetSizeTrait>() {
diff --git a/arrow-array/src/builder/generic_bytes_dictionary_builder.rs 
b/arrow-array/src/builder/generic_bytes_dictionary_builder.rs
index b0c722ae7cd..198d4fcbeb2 100644
--- a/arrow-array/src/builder/generic_bytes_dictionary_builder.rs
+++ b/arrow-array/src/builder/generic_bytes_dictionary_builder.rs
@@ -402,7 +402,6 @@ pub type LargeBinaryDictionaryBuilder<K> = 
GenericByteDictionaryBuilder<K, Gener
 mod tests {
     use super::*;
 
-    use crate::array::Array;
     use crate::array::Int8Array;
     use crate::types::{Int16Type, Int32Type, Int8Type, Utf8Type};
     use crate::{BinaryArray, StringArray};
diff --git a/arrow-array/src/builder/generic_list_builder.rs 
b/arrow-array/src/builder/generic_list_builder.rs
index b857224c5da..25903bcf546 100644
--- a/arrow-array/src/builder/generic_list_builder.rs
+++ b/arrow-array/src/builder/generic_list_builder.rs
@@ -354,7 +354,7 @@ mod tests {
     use crate::builder::{make_builder, Int32Builder, ListBuilder};
     use crate::cast::AsArray;
     use crate::types::Int32Type;
-    use crate::{Array, Int32Array};
+    use crate::Int32Array;
     use arrow_schema::DataType;
 
     fn _test_generic_list_array_builder<O: OffsetSizeTrait>() {
diff --git a/arrow-array/src/builder/primitive_builder.rs 
b/arrow-array/src/builder/primitive_builder.rs
index 0aad2dbfce0..39b27bfca89 100644
--- a/arrow-array/src/builder/primitive_builder.rs
+++ b/arrow-array/src/builder/primitive_builder.rs
@@ -17,7 +17,7 @@
 
 use crate::builder::{ArrayBuilder, BufferBuilder};
 use crate::types::*;
-use crate::{ArrayRef, ArrowPrimitiveType, PrimitiveArray};
+use crate::{ArrayRef, PrimitiveArray};
 use arrow_buffer::NullBufferBuilder;
 use arrow_buffer::{Buffer, MutableBuffer};
 use arrow_data::ArrayData;
@@ -359,7 +359,6 @@ impl<P: ArrowPrimitiveType> Extend<Option<P::Native>> for 
PrimitiveBuilder<P> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use arrow_buffer::Buffer;
     use arrow_schema::TimeUnit;
 
     use crate::array::Array;
@@ -367,7 +366,6 @@ mod tests {
     use crate::array::Date32Array;
     use crate::array::Int32Array;
     use crate::array::TimestampSecondArray;
-    use crate::builder::Int32Builder;
 
     #[test]
     fn test_primitive_array_builder_i32() {
diff --git a/arrow-array/src/builder/primitive_dictionary_builder.rs 
b/arrow-array/src/builder/primitive_dictionary_builder.rs
index a47b2d30d4f..a64ecf0caa1 100644
--- a/arrow-array/src/builder/primitive_dictionary_builder.rs
+++ b/arrow-array/src/builder/primitive_dictionary_builder.rs
@@ -319,7 +319,6 @@ impl<K: ArrowDictionaryKeyType, P: ArrowPrimitiveType> 
Extend<Option<P::Native>>
 mod tests {
     use super::*;
 
-    use crate::array::Array;
     use crate::array::UInt32Array;
     use crate::array::UInt8Array;
     use crate::builder::Decimal128Builder;
diff --git a/arrow-array/src/builder/struct_builder.rs 
b/arrow-array/src/builder/struct_builder.rs
index ce4aa48e9dd..eeb37cd8e66 100644
--- a/arrow-array/src/builder/struct_builder.rs
+++ b/arrow-array/src/builder/struct_builder.rs
@@ -16,10 +16,9 @@
 // under the License.
 
 use crate::builder::*;
-use crate::{ArrayRef, StructArray};
+use crate::StructArray;
 use arrow_buffer::NullBufferBuilder;
 use arrow_schema::{DataType, Fields, IntervalUnit, SchemaBuilder, TimeUnit};
-use std::any::Any;
 use std::sync::Arc;
 
 /// Builder for [`StructArray`]
diff --git a/arrow-array/src/timezone.rs b/arrow-array/src/timezone.rs
index dc91886f34c..b4df77deb4f 100644
--- a/arrow-array/src/timezone.rs
+++ b/arrow-array/src/timezone.rs
@@ -235,7 +235,7 @@ mod private {
 mod private {
     use super::*;
     use chrono::offset::TimeZone;
-    use chrono::{FixedOffset, LocalResult, NaiveDate, NaiveDateTime, Offset};
+    use chrono::{LocalResult, NaiveDate, NaiveDateTime, Offset};
     use std::str::FromStr;
 
     /// An [`Offset`] for [`Tz`]
diff --git a/arrow-buffer/src/bigint/mod.rs b/arrow-buffer/src/bigint/mod.rs
index afbb3a31df1..a8aaff13cd2 100644
--- a/arrow-buffer/src/bigint/mod.rs
+++ b/arrow-buffer/src/bigint/mod.rs
@@ -838,9 +838,8 @@ impl ToPrimitive for i256 {
 #[cfg(all(test, not(miri)))] // llvm.x86.subborrow.64 not supported by MIRI
 mod tests {
     use super::*;
-    use num::{BigInt, FromPrimitive, Signed, ToPrimitive};
+    use num::Signed;
     use rand::{thread_rng, Rng};
-    use std::ops::Neg;
 
     #[test]
     fn test_signed_cmp() {
diff --git a/arrow-buffer/src/buffer/immutable.rs 
b/arrow-buffer/src/buffer/immutable.rs
index 10b62a2ce47..552e3f1615c 100644
--- a/arrow-buffer/src/buffer/immutable.rs
+++ b/arrow-buffer/src/buffer/immutable.rs
@@ -17,7 +17,6 @@
 
 use std::alloc::Layout;
 use std::fmt::Debug;
-use std::iter::FromIterator;
 use std::ptr::NonNull;
 use std::sync::Arc;
 
diff --git a/arrow-cast/src/base64.rs b/arrow-cast/src/base64.rs
index e109c811248..319c7654828 100644
--- a/arrow-cast/src/base64.rs
+++ b/arrow-cast/src/base64.rs
@@ -86,7 +86,6 @@ pub fn b64_decode<E: Engine, O: OffsetSizeTrait>(
 mod tests {
     use super::*;
     use arrow_array::BinaryArray;
-    use base64::prelude::{BASE64_STANDARD, BASE64_STANDARD_NO_PAD};
     use rand::{thread_rng, Rng};
 
     fn test_engine<E: Engine>(e: &E, a: &BinaryArray) {
diff --git a/arrow-cast/src/parse.rs b/arrow-cast/src/parse.rs
index 6214e6d9737..7f23526142c 100644
--- a/arrow-cast/src/parse.rs
+++ b/arrow-cast/src/parse.rs
@@ -17,7 +17,7 @@
 
 use arrow_array::timezone::Tz;
 use arrow_array::types::*;
-use arrow_array::{ArrowNativeTypeOp, ArrowPrimitiveType};
+use arrow_array::ArrowNativeTypeOp;
 use arrow_buffer::ArrowNativeType;
 use arrow_schema::ArrowError;
 use chrono::prelude::*;
@@ -1222,7 +1222,6 @@ fn parse_interval_components(
 mod tests {
     use super::*;
     use arrow_array::temporal_conversions::date32_to_datetime;
-    use arrow_array::timezone::Tz;
     use arrow_buffer::i256;
 
     #[test]
diff --git a/arrow-data/src/data.rs b/arrow-data/src/data.rs
index 2ddc2d845b0..bd45c4f8ddd 100644
--- a/arrow-data/src/data.rs
+++ b/arrow-data/src/data.rs
@@ -22,7 +22,6 @@ use crate::bit_iterator::BitSliceIterator;
 use arrow_buffer::buffer::{BooleanBuffer, NullBuffer};
 use arrow_buffer::{bit_util, i256, ArrowNativeType, Buffer, MutableBuffer};
 use arrow_schema::{ArrowError, DataType, UnionMode};
-use std::convert::TryInto;
 use std::mem;
 use std::ops::Range;
 use std::sync::Arc;
diff --git a/arrow-flight/src/decode.rs b/arrow-flight/src/decode.rs
index 95bbe2b46bb..afbf033eb06 100644
--- a/arrow-flight/src/decode.rs
+++ b/arrow-flight/src/decode.rs
@@ -21,7 +21,7 @@ use arrow_buffer::Buffer;
 use arrow_schema::{Schema, SchemaRef};
 use bytes::Bytes;
 use futures::{ready, stream::BoxStream, Stream, StreamExt};
-use std::{collections::HashMap, convert::TryFrom, fmt::Debug, pin::Pin, 
sync::Arc, task::Poll};
+use std::{collections::HashMap, fmt::Debug, pin::Pin, sync::Arc, task::Poll};
 use tonic::metadata::MetadataMap;
 
 use crate::error::{FlightError, Result};
diff --git a/arrow-flight/src/lib.rs b/arrow-flight/src/lib.rs
index 434d19ce76f..a4b4ab7bc31 100644
--- a/arrow-flight/src/lib.rs
+++ b/arrow-flight/src/lib.rs
@@ -46,11 +46,7 @@ use base64::prelude::BASE64_STANDARD;
 use base64::Engine;
 use bytes::Bytes;
 use prost_types::Timestamp;
-use std::{
-    convert::{TryFrom, TryInto},
-    fmt,
-    ops::Deref,
-};
+use std::{fmt, ops::Deref};
 
 type ArrowResult<T> = std::result::Result<T, ArrowError>;
 
diff --git a/arrow-integration-test/src/lib.rs 
b/arrow-integration-test/src/lib.rs
index a312148dc91..d6e0dda51a8 100644
--- a/arrow-integration-test/src/lib.rs
+++ b/arrow-integration-test/src/lib.rs
@@ -34,9 +34,7 @@ use arrow::buffer::{Buffer, MutableBuffer};
 use arrow::compute;
 use arrow::datatypes::*;
 use arrow::error::{ArrowError, Result};
-use arrow::record_batch::{RecordBatch, RecordBatchReader};
 use arrow::util::bit_util;
-use arrow_buffer::i256;
 
 mod datatype;
 mod field;
@@ -1011,9 +1009,6 @@ mod tests {
 
     use std::fs::File;
     use std::io::Read;
-    use std::sync::Arc;
-
-    use arrow::buffer::Buffer;
 
     #[test]
     fn test_schema_equality() {
diff --git a/arrow-integration-test/src/schema.rs 
b/arrow-integration-test/src/schema.rs
index b5f6c5e86b3..541a1ec746a 100644
--- a/arrow-integration-test/src/schema.rs
+++ b/arrow-integration-test/src/schema.rs
@@ -101,7 +101,7 @@ struct MetadataKeyValue {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use arrow::datatypes::{DataType, Field, Fields, IntervalUnit, TimeUnit};
+    use arrow::datatypes::{DataType, Field, IntervalUnit, TimeUnit};
     use serde_json::Value;
     use std::sync::Arc;
 
diff --git 
a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs 
b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
index 623a240348f..25203ecb769 100644
--- a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
+++ b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
@@ -16,7 +16,6 @@
 // under the License.
 
 use std::collections::HashMap;
-use std::convert::TryFrom;
 use std::pin::Pin;
 use std::sync::Arc;
 
@@ -35,7 +34,6 @@ use arrow_flight::{
     PollInfo, PutResult, SchemaAsIpc, SchemaResult, Ticket,
 };
 use futures::{channel::mpsc, sink::SinkExt, Stream, StreamExt};
-use std::convert::TryInto;
 use tokio::sync::Mutex;
 use tonic::{transport::Server, Request, Response, Status, Streaming};
 
diff --git a/arrow-ipc/src/reader.rs b/arrow-ipc/src/reader.rs
index 361c4f7f67d..f015674d681 100644
--- a/arrow-ipc/src/reader.rs
+++ b/arrow-ipc/src/reader.rs
@@ -1190,7 +1190,6 @@ mod tests {
     use crate::root_as_message;
     use arrow_array::builder::{PrimitiveRunBuilder, UnionBuilder};
     use arrow_array::types::*;
-    use arrow_buffer::ArrowNativeType;
     use arrow_data::ArrayDataBuilder;
 
     fn create_test_projection_schema() -> Schema {
diff --git a/arrow-ipc/src/writer.rs b/arrow-ipc/src/writer.rs
index 99e52e2a707..4e32b04b0fb 100644
--- a/arrow-ipc/src/writer.rs
+++ b/arrow-ipc/src/writer.rs
@@ -1421,14 +1421,12 @@ fn pad_to_8(len: u32) -> usize {
 mod tests {
     use std::io::Cursor;
     use std::io::Seek;
-    use std::sync::Arc;
 
     use arrow_array::builder::GenericListBuilder;
     use arrow_array::builder::MapBuilder;
     use arrow_array::builder::UnionBuilder;
     use arrow_array::builder::{PrimitiveRunBuilder, UInt32Builder};
     use arrow_array::types::*;
-    use arrow_schema::DataType;
 
     use crate::reader::*;
     use crate::MetadataVersion;
diff --git a/arrow-json/src/lib.rs b/arrow-json/src/lib.rs
index e39882e5262..c7839a17150 100644
--- a/arrow-json/src/lib.rs
+++ b/arrow-json/src/lib.rs
@@ -144,10 +144,7 @@ impl JsonSerializable for f64 {
 mod tests {
     use super::*;
 
-    use serde_json::{
-        Number,
-        Value::{Bool, Number as VNumber, String as VString},
-    };
+    use serde_json::Value::{Bool, Number as VNumber, String as VString};
 
     #[test]
     fn test_arrow_native_type_to_json() {
diff --git a/arrow-json/src/reader/mod.rs b/arrow-json/src/reader/mod.rs
index 5afe0dec279..99055573345 100644
--- a/arrow-json/src/reader/mod.rs
+++ b/arrow-json/src/reader/mod.rs
@@ -140,7 +140,6 @@ use chrono::Utc;
 use serde::Serialize;
 
 use arrow_array::timezone::Tz;
-use arrow_array::types::Float32Type;
 use arrow_array::types::*;
 use arrow_array::{downcast_integer, make_array, RecordBatch, 
RecordBatchReader, StructArray};
 use arrow_data::ArrayData;
@@ -713,20 +712,13 @@ mod tests {
     use serde_json::json;
     use std::fs::File;
     use std::io::{BufReader, Cursor, Seek};
-    use std::sync::Arc;
 
     use arrow_array::cast::AsArray;
-    use arrow_array::types::Int32Type;
-    use arrow_array::{
-        make_array, Array, BooleanArray, Float64Array, ListArray, StringArray, 
StructArray,
-    };
+    use arrow_array::{Array, BooleanArray, Float64Array, ListArray, 
StringArray};
     use arrow_buffer::{ArrowNativeType, Buffer};
     use arrow_cast::display::{ArrayFormatter, FormatOptions};
     use arrow_data::ArrayDataBuilder;
-    use arrow_schema::{DataType, Field, FieldRef, Schema};
-
-    use crate::reader::infer_json_schema;
-    use crate::ReaderBuilder;
+    use arrow_schema::Field;
 
     use super::*;
 
diff --git a/arrow-ord/src/ord.rs b/arrow-ord/src/ord.rs
index f6bd39c9cd5..e793038de92 100644
--- a/arrow-ord/src/ord.rs
+++ b/arrow-ord/src/ord.rs
@@ -131,10 +131,8 @@ pub fn build_compare(left: &dyn Array, right: &dyn Array) 
-> Result<DynComparato
 #[cfg(test)]
 pub mod tests {
     use super::*;
-    use arrow_array::{FixedSizeBinaryArray, Float64Array, Int32Array};
     use arrow_buffer::{i256, OffsetBuffer};
     use half::f16;
-    use std::cmp::Ordering;
     use std::sync::Arc;
 
     #[test]
diff --git a/arrow-ord/src/sort.rs b/arrow-ord/src/sort.rs
index 2c06057a84e..fe3a1f86ac0 100644
--- a/arrow-ord/src/sort.rs
+++ b/arrow-ord/src/sort.rs
@@ -833,7 +833,6 @@ mod tests {
     use half::f16;
     use rand::rngs::StdRng;
     use rand::{Rng, RngCore, SeedableRng};
-    use std::sync::Arc;
 
     fn create_decimal128_array(data: Vec<Option<i128>>) -> Decimal128Array {
         data.into_iter()
diff --git a/arrow-row/src/lib.rs b/arrow-row/src/lib.rs
index 6fd92eaf914..c2f5293f94c 100644
--- a/arrow-row/src/lib.rs
+++ b/arrow-row/src/lib.rs
@@ -1303,8 +1303,6 @@ unsafe fn decode_column(
 
 #[cfg(test)]
 mod tests {
-    use std::sync::Arc;
-
     use rand::distributions::uniform::SampleUniform;
     use rand::distributions::{Distribution, Standard};
     use rand::{thread_rng, Rng};
@@ -1315,7 +1313,7 @@ mod tests {
     use arrow_buffer::i256;
     use arrow_buffer::Buffer;
     use arrow_cast::display::array_value_to_string;
-    use arrow_ord::sort::{LexicographicalComparator, SortColumn, SortOptions};
+    use arrow_ord::sort::{LexicographicalComparator, SortColumn};
 
     use super::*;
 
diff --git a/arrow-schema/src/datatype.rs b/arrow-schema/src/datatype.rs
index b3d89b011e6..89c001b0e65 100644
--- a/arrow-schema/src/datatype.rs
+++ b/arrow-schema/src/datatype.rs
@@ -667,7 +667,6 @@ pub const DECIMAL_DEFAULT_SCALE: i8 = 10;
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::{Field, UnionMode};
 
     #[test]
     #[cfg(feature = "serde")]
diff --git a/arrow-schema/src/field.rs b/arrow-schema/src/field.rs
index 70a3e2b21a3..0770cf41a02 100644
--- a/arrow-schema/src/field.rs
+++ b/arrow-schema/src/field.rs
@@ -580,10 +580,7 @@ impl std::fmt::Display for Field {
 #[cfg(test)]
 mod test {
     use super::*;
-    use crate::Fields;
     use std::collections::hash_map::DefaultHasher;
-    use std::hash::{Hash, Hasher};
-    use std::sync::Arc;
 
     #[test]
     fn test_new_with_string() {
diff --git a/arrow-select/src/concat.rs b/arrow-select/src/concat.rs
index 695903195d2..f98e85475a2 100644
--- a/arrow-select/src/concat.rs
+++ b/arrow-select/src/concat.rs
@@ -195,9 +195,7 @@ pub fn concat_batches<'a>(
 mod tests {
     use super::*;
     use arrow_array::builder::StringDictionaryBuilder;
-    use arrow_array::cast::AsArray;
     use arrow_schema::{Field, Schema};
-    use std::sync::Arc;
 
     #[test]
     fn test_concat_empty_vec() {
diff --git a/arrow-select/src/interleave.rs b/arrow-select/src/interleave.rs
index 8229a8f3fe0..fccc02ac939 100644
--- a/arrow-select/src/interleave.rs
+++ b/arrow-select/src/interleave.rs
@@ -267,10 +267,6 @@ fn interleave_fallback(
 mod tests {
     use super::*;
     use arrow_array::builder::{Int32Builder, ListBuilder};
-    use arrow_array::cast::AsArray;
-    use arrow_array::types::Int32Type;
-    use arrow_array::{Int32Array, ListArray, StringArray};
-    use arrow_schema::DataType;
 
     #[test]
     fn test_primitive() {
diff --git a/arrow-select/src/nullif.rs b/arrow-select/src/nullif.rs
index 4025a5bacf8..a7848c16a8e 100644
--- a/arrow-select/src/nullif.rs
+++ b/arrow-select/src/nullif.rs
@@ -102,7 +102,7 @@ mod tests {
     use arrow_array::types::Int32Type;
     use arrow_array::{Int32Array, NullArray, StringArray, StructArray};
     use arrow_data::ArrayData;
-    use arrow_schema::{DataType, Field, Fields};
+    use arrow_schema::{Field, Fields};
     use rand::{thread_rng, Rng};
 
     #[test]
diff --git a/arrow-string/src/concat_elements.rs 
b/arrow-string/src/concat_elements.rs
index 66ecd34868a..cb60363d332 100644
--- a/arrow-string/src/concat_elements.rs
+++ b/arrow-string/src/concat_elements.rs
@@ -207,7 +207,6 @@ pub fn concat_elements_dyn(left: &dyn Array, right: &dyn 
Array) -> Result<ArrayR
 #[cfg(test)]
 mod tests {
     use super::*;
-    use arrow_array::StringArray;
     #[test]
     fn test_string_concat() {
         let left = [Some("foo"), Some("bar"), None]
diff --git a/arrow-string/src/length.rs b/arrow-string/src/length.rs
index 1dd5933ce0e..79fa4602691 100644
--- a/arrow-string/src/length.rs
+++ b/arrow-string/src/length.rs
@@ -141,8 +141,7 @@ pub fn bit_length(array: &dyn Array) -> Result<ArrayRef, 
ArrowError> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use arrow_array::cast::AsArray;
-    use arrow_buffer::{Buffer, NullBuffer};
+    use arrow_buffer::Buffer;
     use arrow_data::ArrayData;
     use arrow_schema::Field;
 
diff --git a/arrow-string/src/regexp.rs b/arrow-string/src/regexp.rs
index 5e539b91b49..f79eff4b6ea 100644
--- a/arrow-string/src/regexp.rs
+++ b/arrow-string/src/regexp.rs
@@ -404,7 +404,6 @@ pub fn regexp_match(
 #[cfg(test)]
 mod tests {
     use super::*;
-    use arrow_array::{ListArray, StringArray};
 
     #[test]
     fn match_single_group() {
diff --git a/arrow/benches/array_from_vec.rs b/arrow/benches/array_from_vec.rs
index 5fce3f113e4..fd83ad5c2a1 100644
--- a/arrow/benches/array_from_vec.rs
+++ b/arrow/benches/array_from_vec.rs
@@ -25,7 +25,7 @@ extern crate arrow;
 use arrow::array::*;
 use arrow_buffer::i256;
 use rand::Rng;
-use std::{convert::TryFrom, sync::Arc};
+use std::sync::Arc;
 
 fn array_from_vec(n: usize) {
     let v: Vec<i32> = (0..n as i32).collect();
diff --git a/arrow/benches/cast_kernels.rs b/arrow/benches/cast_kernels.rs
index 1877ed0e789..228408e5711 100644
--- a/arrow/benches/cast_kernels.rs
+++ b/arrow/benches/cast_kernels.rs
@@ -31,7 +31,6 @@ use arrow::compute::cast;
 use arrow::datatypes::*;
 use arrow::util::bench_util::*;
 use arrow::util::test_util::seedable_rng;
-use arrow_buffer::i256;
 
 fn build_array<T: ArrowPrimitiveType>(size: usize) -> ArrayRef
 where
diff --git a/arrow/benches/comparison_kernels.rs 
b/arrow/benches/comparison_kernels.rs
index 02de70c5d79..a272144b52e 100644
--- a/arrow/benches/comparison_kernels.rs
+++ b/arrow/benches/comparison_kernels.rs
@@ -25,7 +25,6 @@ use arrow::compute::kernels::cmp::*;
 use arrow::datatypes::IntervalMonthDayNanoType;
 use arrow::util::bench_util::*;
 use arrow::{array::*, datatypes::Float32Type, datatypes::Int32Type};
-use arrow_array::Scalar;
 use arrow_string::like::*;
 use arrow_string::regexp::regexp_is_match_utf8_scalar;
 
diff --git a/arrow/benches/csv_reader.rs b/arrow/benches/csv_reader.rs
index b5afac1f6a4..38e091548be 100644
--- a/arrow/benches/csv_reader.rs
+++ b/arrow/benches/csv_reader.rs
@@ -27,7 +27,6 @@ use rand::Rng;
 use arrow::array::*;
 use arrow::csv;
 use arrow::datatypes::*;
-use arrow::record_batch::RecordBatch;
 use arrow::util::bench_util::{create_primitive_array, 
create_string_array_with_len};
 use arrow::util::test_util::seedable_rng;
 
diff --git a/arrow/benches/csv_writer.rs b/arrow/benches/csv_writer.rs
index 0c13428c916..85bd8ca383d 100644
--- a/arrow/benches/csv_writer.rs
+++ b/arrow/benches/csv_writer.rs
@@ -23,7 +23,6 @@ use criterion::*;
 use arrow::array::*;
 use arrow::csv;
 use arrow::datatypes::*;
-use arrow::record_batch::RecordBatch;
 use std::env;
 use std::fs::File;
 use std::sync::Arc;
diff --git a/arrow/benches/filter_kernels.rs b/arrow/benches/filter_kernels.rs
index 65726a27100..50f3cb40094 100644
--- a/arrow/benches/filter_kernels.rs
+++ b/arrow/benches/filter_kernels.rs
@@ -19,7 +19,6 @@ extern crate arrow;
 use std::sync::Arc;
 
 use arrow::compute::{filter_record_batch, FilterBuilder, FilterPredicate};
-use arrow::record_batch::RecordBatch;
 use arrow::util::bench_util::*;
 
 use arrow::array::*;
diff --git a/arrow/benches/partition_kernels.rs 
b/arrow/benches/partition_kernels.rs
index 85cafbe47a1..fce8634a10a 100644
--- a/arrow/benches/partition_kernels.rs
+++ b/arrow/benches/partition_kernels.rs
@@ -24,7 +24,7 @@ use arrow::compute::kernels::sort::{lexsort, SortColumn};
 use arrow::util::bench_util::*;
 use arrow::{
     array::*,
-    datatypes::{ArrowPrimitiveType, Float64Type, UInt8Type},
+    datatypes::{Float64Type, UInt8Type},
 };
 use arrow_ord::partition::partition;
 use rand::distributions::{Distribution, Standard};
diff --git a/arrow/examples/dynamic_types.rs b/arrow/examples/dynamic_types.rs
index 4c01f0ea8c7..b866cb7e6b1 100644
--- a/arrow/examples/dynamic_types.rs
+++ b/arrow/examples/dynamic_types.rs
@@ -26,7 +26,6 @@ use arrow::error::Result;
 
 #[cfg(feature = "prettyprint")]
 use arrow::util::pretty::print_batches;
-use arrow_schema::Fields;
 
 fn main() -> Result<()> {
     // define schema
diff --git a/arrow/src/array/ffi.rs b/arrow/src/array/ffi.rs
index d4d95a6e177..43f54a03842 100644
--- a/arrow/src/array/ffi.rs
+++ b/arrow/src/array/ffi.rs
@@ -17,8 +17,6 @@
 
 //! Contains functionality to load an ArrayData from the C Data Interface
 
-use std::convert::TryFrom;
-
 use crate::{error::Result, ffi};
 
 use super::ArrayRef;
@@ -61,7 +59,6 @@ mod tests {
         datatypes::{DataType, Field},
         ffi::{from_ffi, FFI_ArrowArray, FFI_ArrowSchema},
     };
-    use std::convert::TryFrom;
     use std::sync::Arc;
 
     fn test_round_trip(expected: &ArrayData) -> Result<()> {
diff --git a/arrow/src/ffi.rs b/arrow/src/ffi.rs
index d867f7c30d1..fe3f413924b 100644
--- a/arrow/src/ffi.rs
+++ b/arrow/src/ffi.rs
@@ -461,7 +461,6 @@ impl<'a> ImportedArrowArray<'a> {
 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;
-    use std::convert::TryFrom;
     use std::mem::ManuallyDrop;
     use std::ptr::addr_of_mut;
 
@@ -470,12 +469,6 @@ mod tests {
     use arrow_array::types::{Float64Type, Int32Type};
     use arrow_array::*;
 
-    use crate::array::{
-        make_array, Array, ArrayData, BooleanArray, DictionaryArray, 
DurationSecondArray,
-        FixedSizeBinaryArray, FixedSizeListArray, GenericBinaryArray, 
GenericListArray,
-        GenericStringArray, Int32Array, MapArray, OffsetSizeTrait, 
Time32MillisecondArray,
-        TimestampMillisecondArray, UInt32Array,
-    };
     use crate::compute::kernels;
     use crate::datatypes::{Field, Int8Type};
 
diff --git a/arrow/src/ffi_stream.rs b/arrow/src/ffi_stream.rs
index 06128a1c998..15b88ef3216 100644
--- a/arrow/src/ffi_stream.rs
+++ b/arrow/src/ffi_stream.rs
@@ -58,7 +58,6 @@ use arrow_schema::DataType;
 use std::ffi::CStr;
 use std::ptr::addr_of;
 use std::{
-    convert::TryFrom,
     ffi::CString,
     os::raw::{c_char, c_int, c_void},
     sync::Arc,
@@ -392,12 +391,10 @@ pub unsafe fn export_reader_into_raw(
 
 #[cfg(test)]
 mod tests {
-    use arrow_schema::DataType;
-
     use super::*;
 
     use crate::array::Int32Array;
-    use crate::datatypes::{Field, Schema};
+    use crate::datatypes::Field;
 
     struct TestRecordBatchReader {
         schema: SchemaRef,
diff --git a/arrow/src/tensor.rs b/arrow/src/tensor.rs
index c2a262b399d..f236e6422cd 100644
--- a/arrow/src/tensor.rs
+++ b/arrow/src/tensor.rs
@@ -308,7 +308,6 @@ mod tests {
     use super::*;
 
     use crate::array::*;
-    use crate::buffer::Buffer;
 
     #[test]
     fn test_compute_row_major_strides() {
diff --git a/arrow/src/util/data_gen.rs b/arrow/src/util/data_gen.rs
index 5733fdf22ad..c63aa6bba3e 100644
--- a/arrow/src/util/data_gen.rs
+++ b/arrow/src/util/data_gen.rs
@@ -17,13 +17,12 @@
 
 //! Utilities to generate random arrays and batches
 
-use std::{convert::TryFrom, sync::Arc};
+use std::sync::Arc;
 
 use rand::{distributions::uniform::SampleUniform, Rng};
 
+use crate::array::*;
 use crate::error::{ArrowError, Result};
-use crate::record_batch::{RecordBatch, RecordBatchOptions};
-use crate::{array::*, datatypes::SchemaRef};
 use crate::{
     buffer::{Buffer, MutableBuffer},
     datatypes::*,
@@ -244,7 +243,6 @@ fn create_random_null_buffer(size: usize, null_density: 
f32) -> Buffer {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use arrow_schema::Fields;
 
     #[test]
     fn test_create_batch() {
diff --git a/arrow/src/util/test_util.rs b/arrow/src/util/test_util.rs
index fd051dea1a8..2d718d392ba 100644
--- a/arrow/src/util/test_util.rs
+++ b/arrow/src/util/test_util.rs
@@ -203,7 +203,6 @@ impl<T: Clone> Iterator for BadIterator<T> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::env;
 
     #[test]
     fn test_data_dir() {
diff --git a/parquet/src/arrow/array_reader/mod.rs 
b/parquet/src/arrow/array_reader/mod.rs
index c4e9fc5fa06..4ae0f5669e8 100644
--- a/parquet/src/arrow/array_reader/mod.rs
+++ b/parquet/src/arrow/array_reader/mod.rs
@@ -47,6 +47,7 @@ mod test_util;
 pub use builder::build_array_reader;
 pub use byte_array::make_byte_array_reader;
 pub use byte_array_dictionary::make_byte_array_dictionary_reader;
+#[allow(unused_imports)] // Only used for benchmarks
 pub use fixed_len_byte_array::make_fixed_len_byte_array_reader;
 pub use fixed_size_list_array::FixedSizeListArrayReader;
 pub use list_array::ListArrayReader;
diff --git a/parquet/src/arrow/arrow_reader/mod.rs 
b/parquet/src/arrow/arrow_reader/mod.rs
index 7aeb3d127ac..a34ce77f277 100644
--- a/parquet/src/arrow/arrow_reader/mod.rs
+++ b/parquet/src/arrow/arrow_reader/mod.rs
@@ -741,7 +741,6 @@ mod tests {
         Decimal128Type, Decimal256Type, DecimalType, Float16Type, Float32Type, 
Float64Type,
     };
     use arrow_array::*;
-    use arrow_array::{RecordBatch, RecordBatchReader};
     use arrow_buffer::{i256, ArrowNativeType, Buffer};
     use arrow_data::ArrayDataBuilder;
     use arrow_schema::{DataType as ArrowDataType, Field, Fields, Schema};
diff --git a/parquet/src/arrow/arrow_writer/levels.rs 
b/parquet/src/arrow/arrow_writer/levels.rs
index 1f307822415..955896010b8 100644
--- a/parquet/src/arrow/arrow_writer/levels.rs
+++ b/parquet/src/arrow/arrow_writer/levels.rs
@@ -617,10 +617,7 @@ impl ArrayLevels {
 mod tests {
     use super::*;
 
-    use std::sync::Arc;
-
     use arrow_array::builder::*;
-    use arrow_array::cast::AsArray;
     use arrow_array::types::Int32Type;
     use arrow_array::*;
     use arrow_buffer::{Buffer, ToByteSlice};
diff --git a/parquet/src/arrow/arrow_writer/mod.rs 
b/parquet/src/arrow/arrow_writer/mod.rs
index c72a8af6ce6..18c8617e07e 100644
--- a/parquet/src/arrow/arrow_writer/mod.rs
+++ b/parquet/src/arrow/arrow_writer/mod.rs
@@ -1002,18 +1002,15 @@ fn get_fsb_array_slice(
 mod tests {
     use super::*;
 
-    use bytes::Bytes;
     use std::fs::File;
-    use std::sync::Arc;
 
     use crate::arrow::arrow_reader::{ParquetRecordBatchReader, 
ParquetRecordBatchReaderBuilder};
     use crate::arrow::ARROW_SCHEMA_META_KEY;
     use arrow::datatypes::ToByteSlice;
-    use arrow::datatypes::{DataType, Field, Schema, UInt32Type, UInt8Type};
+    use arrow::datatypes::{DataType, Schema};
     use arrow::error::Result as ArrowResult;
     use arrow::util::pretty::pretty_format_batches;
     use arrow::{array::*, buffer::Buffer};
-    use arrow_array::RecordBatch;
     use arrow_buffer::NullBuffer;
     use arrow_schema::Fields;
 
diff --git a/parquet/src/arrow/buffer/dictionary_buffer.rs 
b/parquet/src/arrow/buffer/dictionary_buffer.rs
index 9e5b2293aa0..59f1cfa056a 100644
--- a/parquet/src/arrow/buffer/dictionary_buffer.rs
+++ b/parquet/src/arrow/buffer/dictionary_buffer.rs
@@ -208,7 +208,7 @@ impl<K: ArrowNativeType, V: OffsetSizeTrait> ValuesBuffer 
for DictionaryBuffer<K
 mod tests {
     use super::*;
     use arrow::compute::cast;
-    use arrow_array::{Array, StringArray};
+    use arrow_array::StringArray;
 
     #[test]
     fn test_dictionary_buffer() {
diff --git a/parquet/src/bloom_filter/mod.rs b/parquet/src/bloom_filter/mod.rs
index 897cce7620a..d99c7251902 100644
--- a/parquet/src/bloom_filter/mod.rs
+++ b/parquet/src/bloom_filter/mod.rs
@@ -344,9 +344,6 @@ fn hash_as_bytes<A: AsBytes + ?Sized>(value: &A) -> u64 {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::format::{
-        BloomFilterAlgorithm, BloomFilterCompression, SplitBlockAlgorithm, 
Uncompressed, XxHash,
-    };
 
     #[test]
     fn test_hash_bytes() {
diff --git a/parquet/src/column/writer/mod.rs b/parquet/src/column/writer/mod.rs
index e993cb4c11a..9ef505cc7cf 100644
--- a/parquet/src/column/writer/mod.rs
+++ b/parquet/src/column/writer/mod.rs
@@ -1292,9 +1292,7 @@ fn increment_utf8(mut data: Vec<u8>) -> Option<Vec<u8>> {
 
 #[cfg(test)]
 mod tests {
-    use crate::{file::properties::DEFAULT_COLUMN_INDEX_TRUNCATE_LENGTH, 
format::BoundaryOrder};
-    use bytes::Bytes;
-    use half::f16;
+    use crate::file::properties::DEFAULT_COLUMN_INDEX_TRUNCATE_LENGTH;
     use rand::distributions::uniform::SampleUniform;
     use std::sync::Arc;
 
@@ -1304,11 +1302,9 @@ mod tests {
     };
     use crate::file::writer::TrackedWrite;
     use crate::file::{
-        properties::{ReaderProperties, WriterProperties},
-        reader::SerializedPageReader,
-        writer::SerializedPageWriter,
+        properties::ReaderProperties, reader::SerializedPageReader, 
writer::SerializedPageWriter,
     };
-    use crate::schema::types::{ColumnDescriptor, ColumnPath, Type as 
SchemaType};
+    use crate::schema::types::{ColumnPath, Type as SchemaType};
     use crate::util::test_common::rand_gen::random_numbers_range;
 
     use super::*;
diff --git a/parquet/src/compression.rs b/parquet/src/compression.rs
index a9a1afbbf21..89f4b64d48b 100644
--- a/parquet/src/compression.rs
+++ b/parquet/src/compression.rs
@@ -144,10 +144,7 @@ pub(crate) trait CompressionLevel<T: std::fmt::Display + 
std::cmp::PartialOrd> {
 /// Given the compression type `codec`, returns a codec used to compress and 
decompress
 /// bytes for the compression type.
 /// This returns `None` if the codec type is `UNCOMPRESSED`.
-pub fn create_codec(
-    codec: CodecType,
-    _options: &CodecOptions,
-) -> Result<Option<Box<dyn Codec>>> {
+pub fn create_codec(codec: CodecType, _options: &CodecOptions) -> 
Result<Option<Box<dyn Codec>>> {
     match codec {
         #[cfg(any(feature = "brotli", test))]
         CodecType::BROTLI(level) => 
Ok(Some(Box::new(BrotliCodec::new(level)))),
@@ -260,8 +257,7 @@ mod gzip_codec {
         }
 
         fn compress(&mut self, input_buf: &[u8], output_buf: &mut Vec<u8>) -> 
Result<()> {
-            let mut encoder =
-                write::GzEncoder::new(output_buf, 
Compression::new(self.level.0));
+            let mut encoder = write::GzEncoder::new(output_buf, 
Compression::new(self.level.0));
             encoder.write_all(input_buf)?;
             encoder.try_finish().map_err(|e| e.into())
         }
@@ -441,7 +437,8 @@ mod lz4_codec {
         }
     }
 }
-#[cfg(any(feature = "lz4", test))]
+
+#[cfg(all(feature = "experimental", any(feature = "lz4", test)))]
 pub use lz4_codec::*;
 
 #[cfg(any(feature = "zstd", test))]
@@ -619,10 +616,7 @@ mod lz4_hadoop_codec {
     /// Adapted from pola-rs 
[compression.rs:try_decompress_hadoop](https://pola-rs.github.io/polars/src/parquet2/compression.rs.html#225)
     /// Translated from the apache arrow c++ function 
[TryDecompressHadoop](https://github.com/apache/arrow/blob/bf18e6e4b5bb6180706b1ba0d597a65a4ce5ca48/cpp/src/arrow/util/compression_lz4.cc#L474).
     /// Returns error if decompression failed.
-    fn try_decompress_hadoop(
-        input_buf: &[u8],
-        output_buf: &mut [u8],
-    ) -> io::Result<usize> {
+    fn try_decompress_hadoop(input_buf: &[u8], output_buf: &mut [u8]) -> 
io::Result<usize> {
         // Parquet files written with the Hadoop Lz4Codec use their own 
framing.
         // The input buffer can contain an arbitrary number of "frames", each
         // with the following structure:
@@ -660,11 +654,9 @@ mod lz4_hadoop_codec {
                     "Not enough bytes to hold advertised output",
                 ));
             }
-            let decompressed_size = lz4_flex::decompress_into(
-                &input[..expected_compressed_size as usize],
-                output,
-            )
-            .map_err(|e| ParquetError::External(Box::new(e)))?;
+            let decompressed_size =
+                lz4_flex::decompress_into(&input[..expected_compressed_size as 
usize], output)
+                    .map_err(|e| ParquetError::External(Box::new(e)))?;
             if decompressed_size != expected_decompressed_size as usize {
                 return Err(io::Error::new(
                     io::ErrorKind::Other,
@@ -712,8 +704,7 @@ mod lz4_hadoop_codec {
                 Ok(n) => {
                     if n != required_len {
                         return Err(ParquetError::General(
-                            "LZ4HadoopCodec uncompress_size is not the 
expected one"
-                                .into(),
+                            "LZ4HadoopCodec uncompress_size is not the 
expected one".into(),
                         ));
                     }
                     Ok(n)
@@ -724,20 +715,12 @@ mod lz4_hadoop_codec {
                 Err(_) => {
                    // Truncate any inserted element before trying next 
algorithm.
                     output_buf.truncate(output_len);
-                    match LZ4Codec::new().decompress(
-                        input_buf,
-                        output_buf,
-                        uncompress_size,
-                    ) {
+                    match LZ4Codec::new().decompress(input_buf, output_buf, 
uncompress_size) {
                         Ok(n) => Ok(n),
                         Err(_) => {
                            // Truncate any inserted element before trying 
next algorithm.
                             output_buf.truncate(output_len);
-                            LZ4RawCodec::new().decompress(
-                                input_buf,
-                                output_buf,
-                                uncompress_size,
-                            )
+                            LZ4RawCodec::new().decompress(input_buf, 
output_buf, uncompress_size)
                         }
                     }
                 }
@@ -759,8 +742,7 @@ mod lz4_hadoop_codec {
             let compressed_size = compressed_size as u32;
             let uncompressed_size = input_buf.len() as u32;
             
output_buf[..SIZE_U32].copy_from_slice(&uncompressed_size.to_be_bytes());
-            output_buf[SIZE_U32..PREFIX_LEN]
-                .copy_from_slice(&compressed_size.to_be_bytes());
+            
output_buf[SIZE_U32..PREFIX_LEN].copy_from_slice(&compressed_size.to_be_bytes());
 
             Ok(())
         }
diff --git a/parquet/src/data_type.rs b/parquet/src/data_type.rs
index 86da7a3acee..5e1d53badba 100644
--- a/parquet/src/data_type.rs
+++ b/parquet/src/data_type.rs
@@ -587,7 +587,6 @@ pub(crate) mod private {
     use crate::util::bit_util::{read_num_bytes, BitReader, BitWriter};
 
     use crate::basic::Type;
-    use std::convert::TryInto;
 
     use super::{ParquetError, Result, SliceAsBytes};
 
diff --git a/parquet/src/file/metadata.rs b/parquet/src/file/metadata.rs
index acd3a9f938c..c9232d83e80 100644
--- a/parquet/src/file/metadata.rs
+++ b/parquet/src/file/metadata.rs
@@ -1003,7 +1003,7 @@ impl OffsetIndexBuilder {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::basic::{Encoding, PageType};
+    use crate::basic::PageType;
 
     #[test]
     fn test_row_group_metadata_thrift_conversion() {
diff --git a/parquet/src/file/page_encoding_stats.rs 
b/parquet/src/file/page_encoding_stats.rs
index c941d401175..edb6a8fa9d4 100644
--- a/parquet/src/file/page_encoding_stats.rs
+++ b/parquet/src/file/page_encoding_stats.rs
@@ -63,7 +63,6 @@ pub fn to_thrift(encoding_stats: &PageEncodingStats) -> 
TPageEncodingStats {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::basic::{Encoding, PageType};
 
     #[test]
     fn test_page_encoding_stats_from_thrift() {
diff --git a/parquet/src/file/reader.rs b/parquet/src/file/reader.rs
index dd6a0fdd231..cff921b20a9 100644
--- a/parquet/src/file/reader.rs
+++ b/parquet/src/file/reader.rs
@@ -22,7 +22,7 @@
 use bytes::{Buf, Bytes};
 use std::fs::File;
 use std::io::{BufReader, Seek, SeekFrom};
-use std::{boxed::Box, io::Read, sync::Arc};
+use std::{io::Read, sync::Arc};
 
 use crate::bloom_filter::Sbbf;
 use crate::column::page::PageIterator;
diff --git a/parquet/src/file/serialized_reader.rs 
b/parquet/src/file/serialized_reader.rs
index 5c47e5a83ce..ac7d2d28748 100644
--- a/parquet/src/file/serialized_reader.rs
+++ b/parquet/src/file/serialized_reader.rs
@@ -20,7 +20,7 @@
 
 use std::collections::VecDeque;
 use std::iter;
-use std::{convert::TryFrom, fs::File, io::Read, path::Path, sync::Arc};
+use std::{fs::File, io::Read, path::Path, sync::Arc};
 
 use crate::basic::{Encoding, Type};
 use crate::bloom_filter::Sbbf;
@@ -769,9 +769,6 @@ impl<R: ChunkReader> PageReader for SerializedPageReader<R> 
{
 
 #[cfg(test)]
 mod tests {
-    use bytes::Bytes;
-    use std::sync::Arc;
-
     use crate::format::BoundaryOrder;
 
     use crate::basic::{self, ColumnOrder};
diff --git a/parquet/src/file/writer.rs b/parquet/src/file/writer.rs
index cfcd82c6489..7806384cdb5 100644
--- a/parquet/src/file/writer.rs
+++ b/parquet/src/file/writer.rs
@@ -807,7 +807,6 @@ mod tests {
     use crate::data_type::{BoolType, Int32Type};
     use crate::file::page_index::index::Index;
     use crate::file::properties::EnabledStatistics;
-    use crate::file::reader::ChunkReader;
     use crate::file::serialized_reader::ReadOptionsBuilder;
     use crate::file::{
         properties::{ReaderProperties, WriterProperties, WriterVersion},
diff --git a/parquet/src/record/reader.rs b/parquet/src/record/reader.rs
index c6bf8f1f93e..d74dcd276e8 100644
--- a/parquet/src/record/reader.rs
+++ b/parquet/src/record/reader.rs
@@ -804,14 +804,12 @@ mod tests {
     use super::*;
 
     use crate::data_type::Int64Type;
-    use crate::errors::Result;
-    use crate::file::reader::{FileReader, SerializedFileReader};
+    use crate::file::reader::SerializedFileReader;
     use crate::file::writer::SerializedFileWriter;
-    use crate::record::api::{Field, Row, RowAccessor};
+    use crate::record::api::RowAccessor;
     use crate::schema::parser::parse_message_type;
     use crate::util::test_common::file_util::{get_test_file, get_test_path};
     use bytes::Bytes;
-    use std::convert::TryFrom;
 
     // Convenient macros to assemble row, list, map, and group.
 
diff --git a/parquet/src/schema/printer.rs b/parquet/src/schema/printer.rs
index 2dec8a5be9f..0bbf2af748f 100644
--- a/parquet/src/schema/printer.rs
+++ b/parquet/src/schema/printer.rs
@@ -384,9 +384,9 @@ mod tests {
 
     use std::sync::Arc;
 
-    use crate::basic::{LogicalType, Repetition, Type as PhysicalType};
+    use crate::basic::{Repetition, Type as PhysicalType};
     use crate::errors::Result;
-    use crate::schema::{parser::parse_message_type, types::Type};
+    use crate::schema::parser::parse_message_type;
 
     fn assert_print_parse_message(message: Type) {
         let mut s = String::new();
diff --git a/parquet/src/schema/types.rs b/parquet/src/schema/types.rs
index c913f13c174..dbf6e8dcb3b 100644
--- a/parquet/src/schema/types.rs
+++ b/parquet/src/schema/types.rs
@@ -17,7 +17,7 @@
 
 //! Contains structs and methods to build Parquet schema and schema 
descriptors.
 
-use std::{collections::HashMap, convert::From, fmt, sync::Arc};
+use std::{collections::HashMap, fmt, sync::Arc};
 
 use crate::format::SchemaElement;
 
diff --git a/parquet_derive/src/parquet_field.rs 
b/parquet_derive/src/parquet_field.rs
index 8d759d11c4b..3ab85a8972f 100644
--- a/parquet_derive/src/parquet_field.rs
+++ b/parquet_derive/src/parquet_field.rs
@@ -801,7 +801,7 @@ impl Type {
 #[cfg(test)]
 mod test {
     use super::*;
-    use syn::{self, Data, DataStruct, DeriveInput};
+    use syn::{Data, DataStruct, DeriveInput};
 
     fn extract_fields(input: proc_macro2::TokenStream) -> Vec<syn::Field> {
         let input: DeriveInput = syn::parse2(input).unwrap();

Reply via email to