alamb commented on code in PR #9497:
URL: https://github.com/apache/arrow-rs/pull/9497#discussion_r2874800389


##########
arrow-json/src/reader/list_array.rs:
##########
@@ -93,15 +93,14 @@ impl<O: OffsetSizeTrait> ArrayDecoder for ListArrayDecoder<O> {
 
         let child_data = self.decoder.decode(tape, &child_pos)?;
         let nulls = nulls.as_mut().map(|x| NullBuffer::new(x.finish()));
+        let values = make_array(child_data);
+        let field = match &self.data_type {
+            DataType::List(f) | DataType::LargeList(f) => f.clone(),
+            _ => unreachable!(),
+        };
+        let offsets = OffsetBuffer::<O>::new(ScalarBuffer::from(offsets.finish()));
 
-        let data = ArrayDataBuilder::new(self.data_type.clone())
-            .len(pos.len())
-            .nulls(nulls)
-            .add_buffer(offsets.finish())
-            .child_data(vec![child_data]);
-
-        // Safety
-        // Validated lengths above
-        Ok(unsafe { data.build_unchecked() })
+        let array = GenericListArray::<O>::try_new(field, offsets, values, nulls)?;

Review Comment:
   Does `try_new` validate the offsets? If so, that could be a significant performance hit.
   
   Basically, as long as this doesn't do any additional validation, I think it looks good to me.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to