houqp commented on a change in pull request #9256:
URL: https://github.com/apache/arrow/pull/9256#discussion_r568340019



##########
File path: rust/arrow/src/json/writer.rs
##########
@@ -0,0 +1,635 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! JSON Writer
+//!
+//! This JSON writer allows converting Arrow record batches into array of JSON 
objects. It also
+//! provides a Writer struct to help serialize record batches directly into 
line-delimited JSON
+//! objects as bytes.
+//!
+//! Serialize record batches into array of JSON objects:
+//!
+//! ```
+//! use std::sync::Arc;
+//!
+//! use arrow::array::Int32Array;
+//! use arrow::datatypes::{DataType, Field, Schema};
+//! use arrow::json;
+//! use arrow::record_batch::RecordBatch;
+//!
+//! let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
+//! let a = Int32Array::from(vec![1, 2, 3]);
+//! let batch = RecordBatch::try_new(Arc::new(schema), 
vec![Arc::new(a)]).unwrap();
+//!
+//! let json_rows = json::writer::record_batches_to_json_rows(&[batch]);
+//! assert_eq!(
+//!     serde_json::Value::Object(json_rows[1].clone()),
+//!     serde_json::json!({"a": 2}),
+//! );
+//! ```
+//!
+//! Serialize record batches into line-delimited JSON bytes:
+//!
+//! ```
+//! use std::sync::Arc;
+//!
+//! use arrow::array::Int32Array;
+//! use arrow::datatypes::{DataType, Field, Schema};
+//! use arrow::json;
+//! use arrow::record_batch::RecordBatch;
+//!
+//! let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
+//! let a = Int32Array::from(vec![1, 2, 3]);
+//! let batch = RecordBatch::try_new(Arc::new(schema), 
vec![Arc::new(a)]).unwrap();
+//!
+//! let buf = Vec::new();
+//! let mut writer = json::Writer::new(buf);
+//! writer.write_batches(&vec![batch]).unwrap();
+//! ```
+
+use std::io::{BufWriter, Write};
+use std::iter;
+
+use serde_json::map::Map as JsonMap;
+use serde_json::Value;
+
+use crate::array::*;
+use crate::datatypes::*;
+use crate::error::Result;
+use crate::record_batch::RecordBatch;
+
+fn primitive_array_to_json<T: ArrowPrimitiveType>(array: &ArrayRef) -> 
Vec<Value> {
+    as_primitive_array::<T>(array)
+        .iter()
+        .map(|maybe_value| match maybe_value {
+            Some(v) => v.into_json_value().unwrap_or(Value::Null),
+            None => Value::Null,
+        })
+        .collect()
+}
+
+fn struct_array_to_jsonmap_array(
+    array: &StructArray,
+    row_count: usize,
+) -> Vec<JsonMap<String, Value>> {
+    let inner_col_names = array.column_names();
+
+    let mut inner_objs = iter::repeat(JsonMap::new())
+        .take(row_count)
+        .collect::<Vec<JsonMap<String, Value>>>();
+
+    array
+        .columns()
+        .iter()
+        .enumerate()
+        .for_each(|(j, struct_col)| {
+            set_column_for_json_rows(
+                &mut inner_objs,
+                row_count,
+                struct_col,
+                inner_col_names[j],
+            );
+        });
+
+    inner_objs
+}
+
+pub fn array_to_json_array(array: &ArrayRef) -> Vec<Value> {
+    match array.data_type() {
+        DataType::Null => 
iter::repeat(Value::Null).take(array.len()).collect(),
+        DataType::Boolean => as_boolean_array(array)
+            .iter()
+            .map(|maybe_value| match maybe_value {
+                Some(v) => v.into(),
+                None => Value::Null,
+            })
+            .collect(),
+
+        DataType::Utf8 => as_string_array(array)
+            .iter()
+            .map(|maybe_value| match maybe_value {
+                Some(v) => v.into(),
+                None => Value::Null,
+            })
+            .collect(),
+        DataType::Int8 => primitive_array_to_json::<Int8Type>(array),
+        DataType::Int16 => primitive_array_to_json::<Int16Type>(array),
+        DataType::Int32 => primitive_array_to_json::<Int32Type>(array),
+        DataType::Int64 => primitive_array_to_json::<Int64Type>(array),
+        DataType::UInt8 => primitive_array_to_json::<UInt8Type>(array),
+        DataType::UInt16 => primitive_array_to_json::<UInt16Type>(array),
+        DataType::UInt32 => primitive_array_to_json::<UInt32Type>(array),
+        DataType::UInt64 => primitive_array_to_json::<UInt64Type>(array),
+        DataType::Float32 => primitive_array_to_json::<Float32Type>(array),
+        DataType::Float64 => primitive_array_to_json::<Float64Type>(array),
+        DataType::List(_) => as_list_array(array)
+            .iter()
+            .map(|maybe_value| match maybe_value {
+                Some(v) => Value::Array(array_to_json_array(&v)),
+                None => Value::Null,
+            })
+            .collect(),
+        DataType::Struct(_) => {
+            let jsonmaps =
+                struct_array_to_jsonmap_array(as_struct_array(array), 
array.len());
+            jsonmaps.into_iter().map(Value::Object).collect()
+        }
+        _ => {

Review comment:
       Yeah, I was planning to add dictionary support, but this patch set
already grew larger than I expected, so I decided to leave that to a
follow-up PR for easier review.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to