eejbyfeldt commented on code in PR #805:
URL: https://github.com/apache/datafusion-comet/pull/805#discussion_r1715844690


##########
docs/source/user-guide/expressions.md:
##########
@@ -182,6 +182,14 @@ The following Spark expressions are currently available. Any known compatibility
 | VariancePop   |       |
 | VarianceSamp  |       |
 
+## Complex Types
+
+| Expression        | Notes                             |
+| ----------------- | --------------------------------- |
+| CreateNamedStruct | Create a struct                   |
+| GetElementAt      | Access a field in a struct        |
+| StructsToJson     | Convert a struct to a JSON string |
+

Review Comment:
   Nit: In other parts of this document the `Notes` column is only used to 
document compatibility issues and/or limitations. Should we follow that 
convention here as well? The expression names are mostly self-describing, so 
the extra comments do not really add that much. (Maybe `GetElementAt` is a bit 
unclear as to what it maps to in Spark/SQL.)
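
   For example, the table could leave `Notes` empty except where there is 
something to call out. A sketch only; the `GetElementAt` note here is a 
placeholder for whatever the actual Spark/SQL mapping is, not a verified 
description:

   ```
   | Expression        | Notes                                          |
   | ----------------- | ---------------------------------------------- |
   | CreateNamedStruct |                                                |
   | GetElementAt      | <document which Spark/SQL syntax this maps to> |
   | StructsToJson     |                                                |
   ```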
   



##########
native/spark-expr/src/to_json.rs:
##########
@@ -0,0 +1,295 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// TODO upstream this to DataFusion as long as we have a way to specify all
+// of the Spark-specific compatibility features that we need (including
+// being able to specify Spark-compatible cast from all types to string)
+
+use crate::{spark_cast, EvalMode};
+use arrow_array::builder::StringBuilder;
+use arrow_array::{Array, ArrayRef, RecordBatch, StringArray, StructArray};
+use arrow_schema::{DataType, Schema};
+use datafusion_common::Result;
+use datafusion_expr::ColumnarValue;
+use datafusion_physical_expr_common::physical_expr::PhysicalExpr;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::hash::{Hash, Hasher};
+use std::sync::Arc;
+
+/// to_json function
+#[derive(Debug, Hash)]
+pub struct ToJson {
+    /// The input to convert to JSON
+    expr: Arc<dyn PhysicalExpr>,
+    /// Timezone to use when converting timestamps to JSON
+    timezone: String,
+}
+
+impl ToJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, timezone: &str) -> Self {
+        Self {
+            expr,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for ToJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "to_json({}, timezone={})", self.expr, self.timezone)
+    }
+}
+
+impl PartialEq<dyn Any> for ToJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<ToJson>() {
+            self.expr.eq(&other.expr) && self.timezone.eq(&other.timezone)
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for ToJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(DataType::Utf8)
+    }
+
+    fn nullable(&self, input_schema: &Schema) -> Result<bool> {
+        self.expr.nullable(input_schema)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(array_to_json_string(
+            &input,
+            &self.timezone,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(children[0].clone(), &self.timezone)))
+    }
+
+    fn dyn_hash(&self, state: &mut dyn Hasher) {
+        let mut s = state;
+        self.expr.hash(&mut s);
+        self.timezone.hash(&mut s);
+        self.hash(&mut s);
+    }
+}
+
+/// Convert an array into a JSON value string representation
+fn array_to_json_string(arr: &Arc<dyn Array>, timezone: &str) -> Result<ArrayRef> {
+    if let Some(struct_array) = arr.as_any().downcast_ref::<StructArray>() {
+        struct_to_json(struct_array, timezone)
+    } else {
+        spark_cast(
+            ColumnarValue::Array(Arc::clone(arr)),
+            &DataType::Utf8,
+            EvalMode::Legacy,
+            timezone,
+        )?
+        .into_array(arr.len())
+    }
+}
+
+fn struct_to_json(array: &StructArray, timezone: &str) -> Result<ArrayRef> {
+    // get field names
+    let field_names: Vec<String> = array.fields().iter().map(|f| f.name().clone()).collect();
+    // determine which fields need to have their values quoted
+    let quotes_needed: Vec<bool> = array
+        .fields()
+        .iter()
+        .map(|f| match f.data_type() {
+            DataType::Utf8 | DataType::LargeUtf8 => true,
+            DataType::Dictionary(_, dt) => {
+                matches!(dt.as_ref(), DataType::Utf8 | DataType::LargeUtf8)
+            }
+            _ => false,
+        })
+        .collect();
+    // create JSON string representation of each column
+    let string_arrays: Vec<ArrayRef> = array
+        .columns()
+        .iter()
+        .map(|arr| array_to_json_string(arr, timezone))
+        .collect::<Result<Vec<_>>>()?;
+    let string_arrays: Vec<&StringArray> = string_arrays
+        .iter()
+        .map(|arr| {
+            arr.as_any()
+                .downcast_ref::<StringArray>()
+                .expect("string array")
+        })
+        .collect();
+    // build the JSON string containing entries in the format `"field_name":field_value`
+    let mut builder = StringBuilder::with_capacity(array.len(), array.len() * 16);
+    let mut json = String::with_capacity(array.len() * 16);
+    for row_index in 0..array.len() {
+        if array.is_null(row_index) {
+            builder.append_null();
+        } else {
+            json.clear();
+            let mut any_fields_written = false;
+            json.push('{');
+            for col_index in 0..string_arrays.len() {
+                if !string_arrays[col_index].is_null(row_index) {
+                    if any_fields_written {
+                        json.push(',');
+                    }
+                    // quoted field name
+                    json.push('"');
+                    json.push_str(&field_names[col_index]);
+                    json.push_str("\":");
+                    // value
+                    if quotes_needed[col_index] {
+                        json.push('"');
+                    }
+                    json.push_str(string_arrays[col_index].value(row_index));
+                    if quotes_needed[col_index] {
+                        json.push('"');
+                    }

Review Comment:
   I think there is an issue here if the value in 
`string_arrays[col_index].value(row_index)` contains a literal `"` character. 
Spark (via its underlying JSON library) would escape such characters.
   
   There are probably also other characters, such as newlines and tabs, that 
need to be handled.
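
   A minimal sketch of the kind of escaping needed, assuming a hypothetical 
helper `escape_json_string` (not part of this PR); Spark delegates to Jackson, 
which escapes quotes, backslashes, and control characters:

   ```rust
   /// Hypothetical helper: escape a string for embedding in a JSON document.
   fn escape_json_string(s: &str) -> String {
       let mut out = String::with_capacity(s.len());
       for c in s.chars() {
           match c {
               '"' => out.push_str("\\\""),
               '\\' => out.push_str("\\\\"),
               '\n' => out.push_str("\\n"),
               '\r' => out.push_str("\\r"),
               '\t' => out.push_str("\\t"),
               // remaining control characters use the \u00XX form
               c if (c as u32) < 0x20 => out.push_str(&format!("\\u{:04x}", c as u32)),
               c => out.push(c),
           }
       }
       out
   }
   ```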
   



##########
spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala:
##########
@@ -1210,6 +1210,58 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde with CometExprShim
             None
           }
 
+        case StructsToJson(options, child, timezoneId) =>
+          if (options.nonEmpty) {
+            withInfo(expr, "StructsToJson with options is not supported")
+            None
+          } else {
+
+            def isSupportedType(dt: DataType): Boolean = {
+              dt match {
+                case StructType(fields) =>
+                  fields.forall(f => isSupportedType(f.dataType))
+                case DataTypes.BooleanType | DataTypes.ByteType | DataTypes.ShortType |
+                    DataTypes.IntegerType | DataTypes.LongType | DataTypes.FloatType |
+                    DataTypes.DoubleType | DataTypes.StringType =>
+                  true
+                case DataTypes.DateType | DataTypes.TimestampType =>
+                  // TODO implement these types with tests for formatting options and timezone
+                  false
+                case _ => false
+              }
+            }
+
+            val isSupported = child.dataType match {
+              case s: StructType =>
+                s.fields.forall(f => isSupportedType(f.dataType))
+              case _ =>

Review Comment:
   Nit: My reading of the Spark code for this expression 
(https://github.com/apache/spark/blob/bfddd53d98da866b474464321e5b323a3df32e81/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala#L832-L833) 
is that, despite the name being `Structs`, it also handles `Map` and `Array`. 
Should we mention that as a TODO here?



##########
native/spark-expr/src/to_json.rs:
##########
@@ -0,0 +1,295 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// TODO upstream this to DataFusion as long as we have a way to specify all
+// of the Spark-specific compatibility features that we need (including
+// being able to specify Spark-compatible cast from all types to string)
+
+use crate::{spark_cast, EvalMode};
+use arrow_array::builder::StringBuilder;
+use arrow_array::{Array, ArrayRef, RecordBatch, StringArray, StructArray};
+use arrow_schema::{DataType, Schema};
+use datafusion_common::Result;
+use datafusion_expr::ColumnarValue;
+use datafusion_physical_expr_common::physical_expr::PhysicalExpr;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::hash::{Hash, Hasher};
+use std::sync::Arc;
+
+/// to_json function
+#[derive(Debug, Hash)]
+pub struct ToJson {
+    /// The input to convert to JSON
+    expr: Arc<dyn PhysicalExpr>,
+    /// Timezone to use when converting timestamps to JSON
+    timezone: String,
+}
+
+impl ToJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, timezone: &str) -> Self {
+        Self {
+            expr,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for ToJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "to_json({}, timezone={})", self.expr, self.timezone)
+    }
+}
+
+impl PartialEq<dyn Any> for ToJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<ToJson>() {
+            self.expr.eq(&other.expr) && self.timezone.eq(&other.timezone)
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for ToJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(DataType::Utf8)
+    }
+
+    fn nullable(&self, input_schema: &Schema) -> Result<bool> {
+        self.expr.nullable(input_schema)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(array_to_json_string(
+            &input,
+            &self.timezone,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(children[0].clone(), &self.timezone)))
+    }
+
+    fn dyn_hash(&self, state: &mut dyn Hasher) {
+        let mut s = state;
+        self.expr.hash(&mut s);
+        self.timezone.hash(&mut s);
+        self.hash(&mut s);
+    }
+}
+
+/// Convert an array into a JSON value string representation
+fn array_to_json_string(arr: &Arc<dyn Array>, timezone: &str) -> Result<ArrayRef> {
+    if let Some(struct_array) = arr.as_any().downcast_ref::<StructArray>() {
+        struct_to_json(struct_array, timezone)
+    } else {
+        spark_cast(
+            ColumnarValue::Array(Arc::clone(arr)),
+            &DataType::Utf8,
+            EvalMode::Legacy,
+            timezone,
+        )?
+        .into_array(arr.len())
+    }
+}
+
+fn struct_to_json(array: &StructArray, timezone: &str) -> Result<ArrayRef> {
+    // get field names
+    let field_names: Vec<String> = array.fields().iter().map(|f| f.name().clone()).collect();

Review Comment:
   This looks like it creates some unnecessary copies of the field names. Any 
reason not to change this to
   ```suggestion
       let fields = array.fields();
   ```
   with the usage site then becoming
   ```
   json.push_str(fields[col_index].name());
   ```
   
   Or is there some reason to make copies that I am missing?
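
   (Note: `array.fields()` returns a borrowed `&Fields` and `Field::name()` 
also returns a borrowed name, so this would avoid one `String` allocation per 
field with no copying at the usage site.)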



##########
native/spark-expr/src/to_json.rs:
##########
@@ -0,0 +1,295 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// TODO upstream this to DataFusion as long as we have a way to specify all
+// of the Spark-specific compatibility features that we need (including
+// being able to specify Spark-compatible cast from all types to string)
+
+use crate::{spark_cast, EvalMode};
+use arrow_array::builder::StringBuilder;
+use arrow_array::{Array, ArrayRef, RecordBatch, StringArray, StructArray};
+use arrow_schema::{DataType, Schema};
+use datafusion_common::Result;
+use datafusion_expr::ColumnarValue;
+use datafusion_physical_expr_common::physical_expr::PhysicalExpr;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::hash::{Hash, Hasher};
+use std::sync::Arc;
+
+/// to_json function
+#[derive(Debug, Hash)]
+pub struct ToJson {
+    /// The input to convert to JSON
+    expr: Arc<dyn PhysicalExpr>,
+    /// Timezone to use when converting timestamps to JSON
+    timezone: String,
+}
+
+impl ToJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, timezone: &str) -> Self {
+        Self {
+            expr,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for ToJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "to_json({}, timezone={})", self.expr, self.timezone)
+    }
+}
+
+impl PartialEq<dyn Any> for ToJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<ToJson>() {
+            self.expr.eq(&other.expr) && self.timezone.eq(&other.timezone)
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for ToJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(DataType::Utf8)
+    }
+
+    fn nullable(&self, input_schema: &Schema) -> Result<bool> {
+        self.expr.nullable(input_schema)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(array_to_json_string(
+            &input,
+            &self.timezone,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(children[0].clone(), &self.timezone)))
+    }
+
+    fn dyn_hash(&self, state: &mut dyn Hasher) {
+        let mut s = state;
+        self.expr.hash(&mut s);
+        self.timezone.hash(&mut s);
+        self.hash(&mut s);
+    }
+}
+
+/// Convert an array into a JSON value string representation
+fn array_to_json_string(arr: &Arc<dyn Array>, timezone: &str) -> Result<ArrayRef> {
+    if let Some(struct_array) = arr.as_any().downcast_ref::<StructArray>() {
+        struct_to_json(struct_array, timezone)
+    } else {
+        spark_cast(
+            ColumnarValue::Array(Arc::clone(arr)),
+            &DataType::Utf8,
+            EvalMode::Legacy,
+            timezone,
+        )?
+        .into_array(arr.len())
+    }
+}
+
+fn struct_to_json(array: &StructArray, timezone: &str) -> Result<ArrayRef> {
+    // get field names
+    let field_names: Vec<String> = array.fields().iter().map(|f| f.name().clone()).collect();
+    // determine which fields need to have their values quoted
+    let quotes_needed: Vec<bool> = array
+        .fields()
+        .iter()
+        .map(|f| match f.data_type() {
+            DataType::Utf8 | DataType::LargeUtf8 => true,
+            DataType::Dictionary(_, dt) => {
+                matches!(dt.as_ref(), DataType::Utf8 | DataType::LargeUtf8)
+            }
+            _ => false,
+        })
+        .collect();
+    // create JSON string representation of each column
+    let string_arrays: Vec<ArrayRef> = array
+        .columns()
+        .iter()
+        .map(|arr| array_to_json_string(arr, timezone))
+        .collect::<Result<Vec<_>>>()?;
+    let string_arrays: Vec<&StringArray> = string_arrays
+        .iter()
+        .map(|arr| {
+            arr.as_any()
+                .downcast_ref::<StringArray>()
+                .expect("string array")
+        })
+        .collect();
+    // build the JSON string containing entries in the format `"field_name":field_value`
+    let mut builder = StringBuilder::with_capacity(array.len(), array.len() * 16);
+    let mut json = String::with_capacity(array.len() * 16);
+    for row_index in 0..array.len() {
+        if array.is_null(row_index) {
+            builder.append_null();
+        } else {
+            json.clear();
+            let mut any_fields_written = false;
+            json.push('{');
+            for col_index in 0..string_arrays.len() {
+                if !string_arrays[col_index].is_null(row_index) {
+                    if any_fields_written {
+                        json.push(',');
+                    }
+                    // quoted field name
+                    json.push('"');
+                    json.push_str(&field_names[col_index]);

Review Comment:
   The field name also needs to be escaped if it contains problematic 
characters.
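
   The same hypothetical `escape_json_string` helper sketched in the earlier 
comment could cover this too, e.g.:

   ```rust
   // hypothetical: escape the field name before quoting it
   json.push('"');
   json.push_str(&escape_json_string(&field_names[col_index]));
   json.push_str("\":");
   ```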



##########
native/spark-expr/src/to_json.rs:
##########
@@ -0,0 +1,295 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// TODO upstream this to DataFusion as long as we have a way to specify all
+// of the Spark-specific compatibility features that we need (including
+// being able to specify Spark-compatible cast from all types to string)
+
+use crate::{spark_cast, EvalMode};
+use arrow_array::builder::StringBuilder;
+use arrow_array::{Array, ArrayRef, RecordBatch, StringArray, StructArray};
+use arrow_schema::{DataType, Schema};
+use datafusion_common::Result;
+use datafusion_expr::ColumnarValue;
+use datafusion_physical_expr_common::physical_expr::PhysicalExpr;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::hash::{Hash, Hasher};
+use std::sync::Arc;
+
+/// to_json function
+#[derive(Debug, Hash)]
+pub struct ToJson {
+    /// The input to convert to JSON
+    expr: Arc<dyn PhysicalExpr>,
+    /// Timezone to use when converting timestamps to JSON
+    timezone: String,
+}
+
+impl ToJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, timezone: &str) -> Self {
+        Self {
+            expr,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for ToJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "to_json({}, timezone={})", self.expr, self.timezone)
+    }
+}
+
+impl PartialEq<dyn Any> for ToJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<ToJson>() {
+            self.expr.eq(&other.expr) && self.timezone.eq(&other.timezone)
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for ToJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(DataType::Utf8)
+    }
+
+    fn nullable(&self, input_schema: &Schema) -> Result<bool> {
+        self.expr.nullable(input_schema)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(array_to_json_string(
+            &input,
+            &self.timezone,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(children[0].clone(), &self.timezone)))
+    }
+
+    fn dyn_hash(&self, state: &mut dyn Hasher) {
+        let mut s = state;
+        self.expr.hash(&mut s);
+        self.timezone.hash(&mut s);
+        self.hash(&mut s);
+    }
+}
+
+/// Convert an array into a JSON value string representation
+fn array_to_json_string(arr: &Arc<dyn Array>, timezone: &str) -> Result<ArrayRef> {
+    if let Some(struct_array) = arr.as_any().downcast_ref::<StructArray>() {
+        struct_to_json(struct_array, timezone)
+    } else {
+        spark_cast(
+            ColumnarValue::Array(Arc::clone(arr)),
+            &DataType::Utf8,
+            EvalMode::Legacy,
+            timezone,
+        )?
+        .into_array(arr.len())
+    }
+}
+
+fn struct_to_json(array: &StructArray, timezone: &str) -> Result<ArrayRef> {
+    // get field names
+    let field_names: Vec<String> = array.fields().iter().map(|f| f.name().clone()).collect();
+    // determine which fields need to have their values quoted
+    let quotes_needed: Vec<bool> = array
+        .fields()
+        .iter()
+        .map(|f| match f.data_type() {
+            DataType::Utf8 | DataType::LargeUtf8 => true,
+            DataType::Dictionary(_, dt) => {
+                matches!(dt.as_ref(), DataType::Utf8 | DataType::LargeUtf8)
+            }
+            _ => false,
+        })
+        .collect();
+    // create JSON string representation of each column
+    let string_arrays: Vec<ArrayRef> = array
+        .columns()
+        .iter()
+        .map(|arr| array_to_json_string(arr, timezone))
+        .collect::<Result<Vec<_>>>()?;
+    let string_arrays: Vec<&StringArray> = string_arrays
+        .iter()
+        .map(|arr| {
+            arr.as_any()
+                .downcast_ref::<StringArray>()
+                .expect("string array")
+        })
+        .collect();
+    // build the JSON string containing entries in the format `"field_name":field_value`
+    let mut builder = StringBuilder::with_capacity(array.len(), array.len() * 16);
+    let mut json = String::with_capacity(array.len() * 16);
+    for row_index in 0..array.len() {
+        if array.is_null(row_index) {
+            builder.append_null();
+        } else {
+            json.clear();
+            let mut any_fields_written = false;
+            json.push('{');
+            for col_index in 0..string_arrays.len() {
+                if !string_arrays[col_index].is_null(row_index) {
+                    if any_fields_written {
+                        json.push(',');
+                    }
+                    // quoted field name
+                    json.push('"');
+                    json.push_str(&field_names[col_index]);
+                    json.push_str("\":");
+                    // value
+                    if quotes_needed[col_index] {
+                        json.push('"');
+                    }
+                    json.push_str(string_arrays[col_index].value(row_index));
+                    if quotes_needed[col_index] {
+                        json.push('"');
+                    }
+                    any_fields_written = true;
+                }
+            }
+            json.push('}');
+            builder.append_value(json.clone());

Review Comment:
   I believe this copy is not needed.
   ```suggestion
               builder.append_value(&json);
   ```
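
   (For context: arrow's `StringBuilder::append_value` takes `impl AsRef<str>`, 
so passing `&json` works directly and avoids allocating a fresh `String` per 
row.)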



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: github-unsubscr...@datafusion.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

