adriangb commented on code in PR #7015:
URL: https://github.com/apache/arrow-rs/pull/7015#discussion_r1929592262


##########
arrow-json/src/writer/encoder.rs:
##########
@@ -24,11 +26,42 @@ use arrow_schema::{ArrowError, DataType, FieldRef};
 use half::f16;
 use lexical_core::FormattedSize;
 use serde::Serializer;
-use std::io::Write;
 
 #[derive(Debug, Clone, Default)]
 pub struct EncoderOptions {
     pub explicit_nulls: bool,
+    pub encoder_factory: Option<Arc<dyn EncoderFactory>>,
+}
+
+type EncoderFactoryResult<'a> =
+    Result<Option<(Box<dyn Encoder + 'a>, Option<NullBuffer>)>, ArrowError>;
+
+/// A trait to create custom encoders for specific data types.
+///
+/// This allows overriding the default encoders for specific data types,
+/// or adding new encoders for custom data types.
+pub trait EncoderFactory: std::fmt::Debug {
+    /// Make an encoder that if returned runs before all of the default encoders.
+    /// This can be used to override how e.g. binary data is encoded so that it is an encoded string or an array of integers.
+    fn make_default_encoder<'a>(
+        &self,
+        _array: &'a dyn Array,
+        _data_type: &DataType,

Review Comment:
   We don't need to; it just simplifies the implementation of the `EncoderFactory` a bit, because almost surely every implementation is going to call `array.data_type()` anyway. I can remove it if you'd prefer.
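
For context, here is a minimal sketch of what an `EncoderFactory` implementation could look like under this design. The diff above is truncated, so the remainder of the `make_default_encoder` signature is assumed (it may take further parameters), as is an `Encoder` trait with an `encode(&mut self, idx: usize, out: &mut Vec<u8>)` method; `HexBinaryFactory` and `HexEncoder` are hypothetical names. The sketch shows why the explicit `data_type` parameter is convenient: the factory matches on it directly instead of calling `array.data_type()` itself.

```rust
use arrow_array::{cast::AsArray, Array, BinaryArray};
use arrow_schema::DataType;

/// Hypothetical factory that writes Binary columns as hex strings.
#[derive(Debug)]
struct HexBinaryFactory;

impl EncoderFactory for HexBinaryFactory {
    fn make_default_encoder<'a>(
        &self,
        array: &'a dyn Array,
        data_type: &DataType, // handed in, so no call to array.data_type()
    ) -> EncoderFactoryResult<'a> {
        match data_type {
            DataType::Binary => {
                let array: &BinaryArray = array.as_binary();
                // The null buffer is returned alongside the encoder so the
                // writer can handle nulls itself.
                Ok(Some((Box::new(HexEncoder(array)), array.nulls().cloned())))
            }
            // Returning Ok(None) falls through to the built-in encoders.
            _ => Ok(None),
        }
    }
}

struct HexEncoder<'a>(&'a BinaryArray);

impl<'a> Encoder for HexEncoder<'a> {
    fn encode(&mut self, idx: usize, out: &mut Vec<u8>) {
        out.push(b'"');
        for byte in self.0.value(idx) {
            // Two lowercase hex digits per byte.
            out.extend_from_slice(format!("{byte:02x}").as_bytes());
        }
        out.push(b'"');
    }
}
```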



##########
arrow-json/src/writer/mod.rs:
##########
@@ -426,10 +438,13 @@ mod tests {
 
         let actual: Vec<Option<Value>> = input
             .split(|b| *b == b'\n')
-            .map(|s| (!s.is_empty()).then(|| serde_json::from_slice(s).unwrap()))
+            .map(|s| {
+                println!("{:?}", str::from_utf8(s));

Review Comment:
   upsies!
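
The `println!` flagged here reads like leftover debug output; presumably the closure is meant to be just the JSON parse from the removed line (a sketch, assuming the surrounding `let actual` binding ends in a `collect()`):

```rust
let actual: Vec<Option<Value>> = input
    .split(|b| *b == b'\n')
    .map(|s| (!s.is_empty()).then(|| serde_json::from_slice(s).unwrap()))
    .collect();
```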


