mbutrovich commented on code in PR #2934:
URL: https://github.com/apache/datafusion-comet/pull/2934#discussion_r2632006238


##########
spark/src/main/scala/org/apache/comet/serde/structs.scala:
##########
@@ -167,3 +167,67 @@ object CometStructsToJson extends CometExpressionSerde[StructsToJson] {
     }
   }
 }
+
+object CometJsonToStructs extends CometExpressionSerde[JsonToStructs] {
+
+  override def getSupportLevel(expr: JsonToStructs): SupportLevel = {
+    // this feature is partially implemented and not comprehensively tested yet

Review Comment:
   Do you have criteria for when this would change? You allude to arrays, maps, 
and fuzz tests in the PR description.
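   For what it's worth, a minimal sketch of what an array field could look like on the native side, assuming arrow-rs's `ListBuilder`; the helper below is illustrative only and not part of this PR:

```rust
use arrow::array::{Int32Builder, ListBuilder};
use serde_json::Value;

/// Illustrative only: append one JSON value (expected to be an array of ints)
/// to a ListBuilder, treating missing or mismatched values as nulls.
fn append_json_int_array(builder: &mut ListBuilder<Int32Builder>, value: Option<&Value>) {
    match value {
        Some(Value::Array(items)) => {
            for item in items {
                match item.as_i64() {
                    Some(i) => builder.values().append_value(i as i32),
                    None => builder.values().append_null(),
                }
            }
            // Close out this row's list entry.
            builder.append(true);
        }
        // Missing key, explicit null, or non-array value -> null list entry.
        _ => builder.append(false),
    }
}
```

   Maps would presumably follow the same shape with `MapBuilder`, which is why the criteria question above seems worth pinning down.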



##########
native/spark-expr/src/json_funcs/from_json.rs:
##########
@@ -0,0 +1,652 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use arrow::array::{
+    Array, ArrayRef, BooleanBuilder, Float32Builder, Float64Builder, Int32Builder, Int64Builder,
+    RecordBatch, StringBuilder, StructArray,
+};
+use arrow::datatypes::{DataType, Field, Schema};
+use datafusion::common::Result;
+use datafusion::physical_expr::PhysicalExpr;
+use datafusion::physical_plan::ColumnarValue;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::sync::Arc;
+
+/// from_json function - parses JSON strings into structured types
+#[derive(Debug, Eq)]
+pub struct FromJson {
+    /// The JSON string input expression
+    expr: Arc<dyn PhysicalExpr>,
+    /// Target schema for parsing
+    schema: DataType,
+    /// Timezone for timestamp parsing (future use)
+    timezone: String,
+}
+
+impl PartialEq for FromJson {
+    fn eq(&self, other: &Self) -> bool {
+        self.expr.eq(&other.expr) && self.schema == other.schema && self.timezone == other.timezone
+    }
+}
+
+impl std::hash::Hash for FromJson {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.expr.hash(state);
+        // Note: DataType doesn't implement Hash, so we hash its debug representation
+        format!("{:?}", self.schema).hash(state);
+        self.timezone.hash(state);
+    }
+}
+
+impl FromJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, schema: DataType, timezone: &str) -> Self {
+        Self {
+            expr,
+            schema,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for FromJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "from_json({}, schema={:?}, timezone={})",
+            self.expr, self.schema, self.timezone
+        )
+    }
+}
+
+impl PartialEq<dyn Any> for FromJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<FromJson>() {
+            self.expr.eq(&other.expr)
+                && self.schema == other.schema
+                && self.timezone == other.timezone
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for FromJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn fmt_sql(&self, _: &mut Formatter<'_>) -> std::fmt::Result {
+        unimplemented!()
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(self.schema.clone())
+    }
+
+    fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
+        // Always nullable - parse errors return null in PERMISSIVE mode
+        Ok(true)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(json_string_to_struct(
+            &input,
+            &self.schema,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(
+            Arc::clone(&children[0]),
+            self.schema.clone(),
+            &self.timezone,
+        )))
+    }
+}
+
+/// Parse JSON string array into struct array
+fn json_string_to_struct(arr: &Arc<dyn Array>, schema: &DataType) -> Result<ArrayRef> {
+    use arrow::array::StringArray;
+    use arrow::buffer::NullBuffer;
+
+    // Input must be string array
+    let string_array = arr.as_any().downcast_ref::<StringArray>().ok_or_else(|| {
+        datafusion::common::DataFusionError::Execution("from_json expects string input".to_string())
+    })?;
+
+    // Schema must be struct
+    let DataType::Struct(fields) = schema else {
+        return Err(datafusion::common::DataFusionError::Execution(
+            "from_json requires struct schema".to_string(),
+        ));
+    };
+
+    // Build struct array by parsing each JSON string
+    let num_rows = string_array.len();
+
+    // Create builders for each field

Review Comment:
   "What" not "why" comment is not needed IMO.



##########
native/spark-expr/src/json_funcs/from_json.rs:
##########
@@ -0,0 +1,652 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use arrow::array::{
+    Array, ArrayRef, BooleanBuilder, Float32Builder, Float64Builder, Int32Builder, Int64Builder,
+    RecordBatch, StringBuilder, StructArray,
+};
+use arrow::datatypes::{DataType, Field, Schema};
+use datafusion::common::Result;
+use datafusion::physical_expr::PhysicalExpr;
+use datafusion::physical_plan::ColumnarValue;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::sync::Arc;
+
+/// from_json function - parses JSON strings into structured types
+#[derive(Debug, Eq)]
+pub struct FromJson {
+    /// The JSON string input expression
+    expr: Arc<dyn PhysicalExpr>,
+    /// Target schema for parsing
+    schema: DataType,
+    /// Timezone for timestamp parsing (future use)
+    timezone: String,
+}
+
+impl PartialEq for FromJson {
+    fn eq(&self, other: &Self) -> bool {
+        self.expr.eq(&other.expr) && self.schema == other.schema && self.timezone == other.timezone
+    }
+}
+
+impl std::hash::Hash for FromJson {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.expr.hash(state);
+        // Note: DataType doesn't implement Hash, so we hash its debug representation
+        format!("{:?}", self.schema).hash(state);
+        self.timezone.hash(state);
+    }
+}
+
+impl FromJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, schema: DataType, timezone: &str) -> Self {
+        Self {
+            expr,
+            schema,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for FromJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "from_json({}, schema={:?}, timezone={})",
+            self.expr, self.schema, self.timezone
+        )
+    }
+}
+
+impl PartialEq<dyn Any> for FromJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<FromJson>() {
+            self.expr.eq(&other.expr)
+                && self.schema == other.schema
+                && self.timezone == other.timezone
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for FromJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn fmt_sql(&self, _: &mut Formatter<'_>) -> std::fmt::Result {
+        unimplemented!()
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(self.schema.clone())
+    }
+
+    fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
+        // Always nullable - parse errors return null in PERMISSIVE mode
+        Ok(true)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(json_string_to_struct(
+            &input,
+            &self.schema,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(
+            Arc::clone(&children[0]),
+            self.schema.clone(),
+            &self.timezone,
+        )))
+    }
+}
+
+/// Parse JSON string array into struct array
+fn json_string_to_struct(arr: &Arc<dyn Array>, schema: &DataType) -> Result<ArrayRef> {
+    use arrow::array::StringArray;
+    use arrow::buffer::NullBuffer;
+
+    // Input must be string array
+    let string_array = arr.as_any().downcast_ref::<StringArray>().ok_or_else(|| {
+        datafusion::common::DataFusionError::Execution("from_json expects string input".to_string())
+    })?;
+
+    // Schema must be struct
+    let DataType::Struct(fields) = schema else {
+        return Err(datafusion::common::DataFusionError::Execution(
+            "from_json requires struct schema".to_string(),
+        ));
+    };
+
+    // Build struct array by parsing each JSON string
+    let num_rows = string_array.len();
+
+    // Create builders for each field
+    let mut field_builders = create_field_builders(fields, num_rows)?;
+
+    // Track which rows should be null at the struct level
+    let mut struct_nulls = vec![true; num_rows];
+
+    // Parse each row
+    for (row_idx, struct_null) in struct_nulls.iter_mut().enumerate() {
+        if string_array.is_null(row_idx) {
+            // Null input -> null struct
+            *struct_null = false;
+            append_null_to_all_builders(&mut field_builders);
+        } else {
+            let json_str = string_array.value(row_idx);
+
+            // Parse JSON (PERMISSIVE mode: return null fields on error)
+            match serde_json::from_str::<serde_json::Value>(json_str) {
+                Ok(json_value) => {
+                    if let serde_json::Value::Object(obj) = json_value {
+                        // Struct is not null, extract each field
+                        *struct_null = true;
+                        for (field, builder) in fields.iter().zip(field_builders.iter_mut()) {
+                            let field_value = obj.get(field.name());
+                            append_field_value(builder, field, field_value)?;
+                        }
+                    } else {
+                        // Not an object -> struct with null fields
+                        *struct_null = true;
+                        append_null_to_all_builders(&mut field_builders);
+                    }
+                }
+                Err(_) => {
+                    // Parse error -> struct with null fields (PERMISSIVE mode)
+                    *struct_null = true;
+                    append_null_to_all_builders(&mut field_builders);
+                }
+            }
+        }
+    }
+
+    // Finish builders
+    let arrays: Vec<ArrayRef> = field_builders
+        .into_iter()
+        .map(finish_builder)
+        .collect::<Result<Vec<_>>>()?;
+
+    // Create null buffer from struct_nulls

Review Comment:
   "what" not "why"



##########
native/core/src/execution/expressions/strings.rs:
##########
@@ -98,3 +99,28 @@ impl ExpressionBuilder for RlikeBuilder {
         }
     }
 }
+
+/// Builder for FromJson expressions

Review Comment:
   Not a terribly useful comment IMO; the code is already self-documenting.



##########
native/spark-expr/src/json_funcs/from_json.rs:
##########
@@ -0,0 +1,652 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use arrow::array::{
+    Array, ArrayRef, BooleanBuilder, Float32Builder, Float64Builder, Int32Builder, Int64Builder,
+    RecordBatch, StringBuilder, StructArray,
+};
+use arrow::datatypes::{DataType, Field, Schema};
+use datafusion::common::Result;
+use datafusion::physical_expr::PhysicalExpr;
+use datafusion::physical_plan::ColumnarValue;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::sync::Arc;
+
+/// from_json function - parses JSON strings into structured types
+#[derive(Debug, Eq)]
+pub struct FromJson {
+    /// The JSON string input expression
+    expr: Arc<dyn PhysicalExpr>,
+    /// Target schema for parsing
+    schema: DataType,
+    /// Timezone for timestamp parsing (future use)
+    timezone: String,
+}
+
+impl PartialEq for FromJson {
+    fn eq(&self, other: &Self) -> bool {
+        self.expr.eq(&other.expr) && self.schema == other.schema && self.timezone == other.timezone
+    }
+}
+
+impl std::hash::Hash for FromJson {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.expr.hash(state);
+        // Note: DataType doesn't implement Hash, so we hash its debug representation
+        format!("{:?}", self.schema).hash(state);
+        self.timezone.hash(state);
+    }
+}
+
+impl FromJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, schema: DataType, timezone: &str) -> Self {
+        Self {
+            expr,
+            schema,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for FromJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "from_json({}, schema={:?}, timezone={})",
+            self.expr, self.schema, self.timezone
+        )
+    }
+}
+
+impl PartialEq<dyn Any> for FromJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<FromJson>() {
+            self.expr.eq(&other.expr)
+                && self.schema == other.schema
+                && self.timezone == other.timezone
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for FromJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn fmt_sql(&self, _: &mut Formatter<'_>) -> std::fmt::Result {
+        unimplemented!()
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(self.schema.clone())
+    }
+
+    fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
+        // Always nullable - parse errors return null in PERMISSIVE mode
+        Ok(true)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(json_string_to_struct(
+            &input,
+            &self.schema,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(
+            Arc::clone(&children[0]),
+            self.schema.clone(),
+            &self.timezone,
+        )))
+    }
+}
+
+/// Parse JSON string array into struct array
+fn json_string_to_struct(arr: &Arc<dyn Array>, schema: &DataType) -> Result<ArrayRef> {
+    use arrow::array::StringArray;
+    use arrow::buffer::NullBuffer;
+
+    // Input must be string array
+    let string_array = arr.as_any().downcast_ref::<StringArray>().ok_or_else(|| {
+        datafusion::common::DataFusionError::Execution("from_json expects string input".to_string())
+    })?;
+
+    // Schema must be struct
+    let DataType::Struct(fields) = schema else {
+        return Err(datafusion::common::DataFusionError::Execution(
+            "from_json requires struct schema".to_string(),
+        ));
+    };
+
+    // Build struct array by parsing each JSON string
+    let num_rows = string_array.len();
+
+    // Create builders for each field
+    let mut field_builders = create_field_builders(fields, num_rows)?;
+
+    // Track which rows should be null at the struct level
+    let mut struct_nulls = vec![true; num_rows];
+
+    // Parse each row
+    for (row_idx, struct_null) in struct_nulls.iter_mut().enumerate() {
+        if string_array.is_null(row_idx) {
+            // Null input -> null struct
+            *struct_null = false;
+            append_null_to_all_builders(&mut field_builders);
+        } else {
+            let json_str = string_array.value(row_idx);
+
+            // Parse JSON (PERMISSIVE mode: return null fields on error)
+            match serde_json::from_str::<serde_json::Value>(json_str) {
+                Ok(json_value) => {
+                    if let serde_json::Value::Object(obj) = json_value {
+                        // Struct is not null, extract each field
+                        *struct_null = true;
+                        for (field, builder) in fields.iter().zip(field_builders.iter_mut()) {
+                            let field_value = obj.get(field.name());
+                            append_field_value(builder, field, field_value)?;
+                        }
+                    } else {
+                        // Not an object -> struct with null fields
+                        *struct_null = true;
+                        append_null_to_all_builders(&mut field_builders);
+                    }
+                }
+                Err(_) => {
+                    // Parse error -> struct with null fields (PERMISSIVE mode)
+                    *struct_null = true;
+                    append_null_to_all_builders(&mut field_builders);
+                }
+            }
+        }
+    }
+
+    // Finish builders

Review Comment:
   "what" not "why"



##########
native/spark-expr/src/json_funcs/from_json.rs:
##########
@@ -0,0 +1,652 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use arrow::array::{
+    Array, ArrayRef, BooleanBuilder, Float32Builder, Float64Builder, Int32Builder, Int64Builder,
+    RecordBatch, StringBuilder, StructArray,
+};
+use arrow::datatypes::{DataType, Field, Schema};
+use datafusion::common::Result;
+use datafusion::physical_expr::PhysicalExpr;
+use datafusion::physical_plan::ColumnarValue;
+use std::any::Any;
+use std::fmt::{Debug, Display, Formatter};
+use std::sync::Arc;
+
+/// from_json function - parses JSON strings into structured types
+#[derive(Debug, Eq)]
+pub struct FromJson {
+    /// The JSON string input expression
+    expr: Arc<dyn PhysicalExpr>,
+    /// Target schema for parsing
+    schema: DataType,
+    /// Timezone for timestamp parsing (future use)
+    timezone: String,
+}
+
+impl PartialEq for FromJson {
+    fn eq(&self, other: &Self) -> bool {
+        self.expr.eq(&other.expr) && self.schema == other.schema && self.timezone == other.timezone
+    }
+}
+
+impl std::hash::Hash for FromJson {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.expr.hash(state);
+        // Note: DataType doesn't implement Hash, so we hash its debug representation
+        format!("{:?}", self.schema).hash(state);
+        self.timezone.hash(state);
+    }
+}
+
+impl FromJson {
+    pub fn new(expr: Arc<dyn PhysicalExpr>, schema: DataType, timezone: &str) -> Self {
+        Self {
+            expr,
+            schema,
+            timezone: timezone.to_owned(),
+        }
+    }
+}
+
+impl Display for FromJson {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "from_json({}, schema={:?}, timezone={})",
+            self.expr, self.schema, self.timezone
+        )
+    }
+}
+
+impl PartialEq<dyn Any> for FromJson {
+    fn eq(&self, other: &dyn Any) -> bool {
+        if let Some(other) = other.downcast_ref::<FromJson>() {
+            self.expr.eq(&other.expr)
+                && self.schema == other.schema
+                && self.timezone == other.timezone
+        } else {
+            false
+        }
+    }
+}
+
+impl PhysicalExpr for FromJson {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn fmt_sql(&self, _: &mut Formatter<'_>) -> std::fmt::Result {
+        unimplemented!()
+    }
+
+    fn data_type(&self, _: &Schema) -> Result<DataType> {
+        Ok(self.schema.clone())
+    }
+
+    fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
+        // Always nullable - parse errors return null in PERMISSIVE mode
+        Ok(true)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
+        let input = self.expr.evaluate(batch)?.into_array(batch.num_rows())?;
+        Ok(ColumnarValue::Array(json_string_to_struct(
+            &input,
+            &self.schema,
+        )?))
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        vec![&self.expr]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 1);
+        Ok(Arc::new(Self::new(
+            Arc::clone(&children[0]),
+            self.schema.clone(),
+            &self.timezone,
+        )))
+    }
+}
+
+/// Parse JSON string array into struct array
+fn json_string_to_struct(arr: &Arc<dyn Array>, schema: &DataType) -> Result<ArrayRef> {
+    use arrow::array::StringArray;
+    use arrow::buffer::NullBuffer;
+
+    // Input must be string array
+    let string_array = arr.as_any().downcast_ref::<StringArray>().ok_or_else(|| {
+        datafusion::common::DataFusionError::Execution("from_json expects string input".to_string())
+    })?;
+
+    // Schema must be struct
+    let DataType::Struct(fields) = schema else {
+        return Err(datafusion::common::DataFusionError::Execution(
+            "from_json requires struct schema".to_string(),
+        ));
+    };
+
+    // Build struct array by parsing each JSON string

Review Comment:
   Possibly a misplaced comment, or it just refers generally to the block of code below.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
