This is an automated email from the ASF dual-hosted git repository.

alamb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git


The following commit(s) were added to refs/heads/master by this push:
     new b9e4497258 Rename `Schema::all_fields` to `flattened_fields` (#6001)
b9e4497258 is described below

commit b9e449725863a214a935b27b8b7056133f13fe6c
Author: 张林伟 <[email protected]>
AuthorDate: Tue Jul 9 02:55:23 2024 +0800

    Rename `Schema::all_fields` to `flattened_fields` (#6001)
    
    * Rename Schema::all_fields to flattened_fields
    
    * Add doc example for Schema::flattened_fields
    
    * fmt doc example
    
    * Update arrow-schema/src/schema.rs
    
    ---------
    
    Co-authored-by: Andrew Lamb <[email protected]>
---
 arrow-flight/tests/flight_sql_client_cli.rs |  2 +-
 arrow-ipc/src/writer.rs                     |  2 +-
 arrow-json/src/reader/mod.rs                |  2 +-
 arrow-schema/src/schema.rs                  | 44 ++++++++++++++++++++++++++++-
 parquet/src/arrow/async_reader/mod.rs       |  2 +-
 5 files changed, 47 insertions(+), 5 deletions(-)

diff --git a/arrow-flight/tests/flight_sql_client_cli.rs b/arrow-flight/tests/flight_sql_client_cli.rs
index 50a4ec0d8c..317eb39004 100644
--- a/arrow-flight/tests/flight_sql_client_cli.rs
+++ b/arrow-flight/tests/flight_sql_client_cli.rs
@@ -568,7 +568,7 @@ impl FlightSqlService for FlightSqlServiceImpl {
         .try_collect::<Vec<_>>()
         .await?;
 
-        for (left, right) in parameters[0].schema().all_fields().iter().zip(vec![
+        for (left, right) in parameters[0].schema().flattened_fields().iter().zip(vec![
             Field::new("$1", DataType::Utf8, false),
             Field::new("$2", DataType::Int64, true),
         ]) {
diff --git a/arrow-ipc/src/writer.rs b/arrow-ipc/src/writer.rs
index c078219599..d0a78ca270 100644
--- a/arrow-ipc/src/writer.rs
+++ b/arrow-ipc/src/writer.rs
@@ -411,7 +411,7 @@ impl IpcDataGenerator {
         write_options: &IpcWriteOptions,
     ) -> Result<(Vec<EncodedData>, EncodedData), ArrowError> {
         let schema = batch.schema();
-        let mut encoded_dictionaries = Vec::with_capacity(schema.all_fields().len());
+        let mut encoded_dictionaries = Vec::with_capacity(schema.flattened_fields().len());
 
         let mut dict_id = dictionary_tracker.dict_ids.clone().into_iter();
 
diff --git a/arrow-json/src/reader/mod.rs b/arrow-json/src/reader/mod.rs
index 0a50cfac65..3e1c5d2fc8 100644
--- a/arrow-json/src/reader/mod.rs
+++ b/arrow-json/src/reader/mod.rs
@@ -289,7 +289,7 @@ impl ReaderBuilder {
 
         let decoder = make_decoder(data_type, self.coerce_primitive, self.strict_mode, nullable)?;
 
-        let num_fields = self.schema.all_fields().len();
+        let num_fields = self.schema.flattened_fields().len();
 
         Ok(Decoder {
             decoder,
diff --git a/arrow-schema/src/schema.rs b/arrow-schema/src/schema.rs
index 3bb076aa54..9a9ef45d8b 100644
--- a/arrow-schema/src/schema.rs
+++ b/arrow-schema/src/schema.rs
@@ -324,11 +324,53 @@ impl Schema {
     }
 
     /// Returns a vector with references to all fields (including nested fields)
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    /// use arrow_schema::{DataType, Field, Fields, Schema};
+    ///
+    /// let f1 = Arc::new(Field::new("a", DataType::Boolean, false));
+    ///
+    /// let f2_inner = Arc::new(Field::new("b_inner", DataType::Int8, false));
+    /// let f2 = Arc::new(Field::new("b", DataType::List(f2_inner.clone()), false));
+    ///
+    /// let f3_inner1 = Arc::new(Field::new("c_inner1", DataType::Int8, false));
+    /// let f3_inner2 = Arc::new(Field::new("c_inner2", DataType::Int8, false));
+    /// let f3 = Arc::new(Field::new(
+    ///     "c",
+    ///     DataType::Struct(vec![f3_inner1.clone(), f3_inner2.clone()].into()),
+    ///     false
+    /// ));
+    ///
+    /// let mut schema = Schema::new(vec![
+    ///   f1.clone(), f2.clone(), f3.clone()
+    /// ]);
+    /// assert_eq!(
+    ///     schema.flattened_fields(),
+    ///     vec![
+    ///         f1.as_ref(),
+    ///         f2.as_ref(),
+    ///         f2_inner.as_ref(),
+    ///         f3.as_ref(),
+    ///         f3_inner1.as_ref(),
+    ///         f3_inner2.as_ref()
+    ///    ]
+    /// );
+    /// ```
     #[inline]
-    pub fn all_fields(&self) -> Vec<&Field> {
+    pub fn flattened_fields(&self) -> Vec<&Field> {
         self.fields.iter().flat_map(|f| f.fields()).collect()
     }
 
+    /// Returns a vector with references to all fields (including nested fields)
+    #[deprecated(since = "52.2.0", note = "Use `flattened_fields` instead")]
+    #[inline]
+    pub fn all_fields(&self) -> Vec<&Field> {
+        self.flattened_fields()
+    }
+
     /// Returns an immutable reference of a specific [`Field`] instance selected using an
     /// offset within the internal `fields` vector.
     ///
diff --git a/parquet/src/arrow/async_reader/mod.rs b/parquet/src/arrow/async_reader/mod.rs
index 0a72583b90..e4205b7ef2 100644
--- a/parquet/src/arrow/async_reader/mod.rs
+++ b/parquet/src/arrow/async_reader/mod.rs
@@ -1650,7 +1650,7 @@ mod tests {
     #[tokio::test]
     async fn test_parquet_record_batch_stream_schema() {
         fn get_all_field_names(schema: &Schema) -> Vec<&String> {
-            schema.all_fields().iter().map(|f| f.name()).collect()
+            schema.flattened_fields().iter().map(|f| f.name()).collect()
         }
 
         // ParquetRecordBatchReaderBuilder::schema differs from

Reply via email to