tustvold commented on code in PR #4160:
URL: https://github.com/apache/arrow-rs/pull/4160#discussion_r1182476936


##########
parquet/src/bin/parquet-fromcsv.rs:
##########
@@ -626,14 +658,71 @@ mod tests {
         schema.as_file().write_all(schema_text.as_bytes()).unwrap();
 
         let mut input_file = NamedTempFile::new().unwrap();
-        {
-            let csv = input_file.as_file_mut();
+
+        fn wirte_tmp_file<T: Write>(w: &mut T) {

Review Comment:
   ```suggestion
           fn write_tmp_file<T: Write>(w: &mut T) {
   ```



##########
parquet/src/bin/parquet-fromcsv.rs:
##########
@@ -368,9 +376,28 @@ fn convert_csv_to_parquet(args: &Args) -> Result<(), ParquetFromCsvError> {
             &format!("Failed to open input file {:#?}", &args.input_file),
         )
     })?;
+
+    // open input file decoder
+    let input_file_decoder = match args.csv_compression {
+        Compression::UNCOMPRESSED => Box::new(input_file) as Box<dyn Read>,
+        Compression::SNAPPY => Box::new(FrameDecoder::new(input_file)) as Box<dyn Read>,
+        Compression::GZIP(_) => Box::new(GzDecoder::new(input_file)) as Box<dyn Read>,
+        Compression::BROTLI(_) => {
+            Box::new(Decompressor::new(input_file, 0)) as Box<dyn Read>
+        }
+        Compression::LZ4 => Box::new(lz4::Decoder::new(input_file).map_err(|e| {
+            ParquetFromCsvError::with_context(e, "Failed to create lz4::Decoder")
+        })?) as Box<dyn Read>,
+        Compression::ZSTD(_) => Box::new(zstd::Decoder::new(input_file).map_err(|e| {
+            ParquetFromCsvError::with_context(e, "Failed to create zstd::Decoder")
+        })?) as Box<dyn Read>,
+        // TODO: I wonder which crates should i use to decompress lzo and lz4_raw?
+        _ => panic!("compression type not support yet"),

Review Comment:
   ```suggestion
           d => unimplemented!("compression type {d}"),
   ```
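   For illustration, a minimal self-contained sketch of the `Box<dyn Read>` dispatch this hunk uses, ending in the suggested `unimplemented!` fallback. It assumes the `flate2` and `snap` crates from the PR's dependency set, and `CsvCompression` is a hypothetical stand-in for parquet's `Compression` enum:
   ```rust
   use std::fs::File;
   use std::io::Read;

   use flate2::read::GzDecoder;
   use snap::read::FrameDecoder;

   // Hypothetical stand-in for parquet::basic::Compression, used only
   // in this sketch.
   #[derive(Debug)]
   enum CsvCompression {
       Uncompressed,
       Snappy,
       Gzip,
       Lzo,
   }

   // Every arm boxes a different `Read` impl behind the same trait
   // object, so the downstream CSV reader never has to know which
   // codec ran.
   fn open_decoder(codec: CsvCompression, input: File) -> Box<dyn Read> {
       match codec {
           CsvCompression::Uncompressed => Box::new(input),
           CsvCompression::Snappy => Box::new(FrameDecoder::new(input)),
           CsvCompression::Gzip => Box::new(GzDecoder::new(input)),
           d => unimplemented!("compression type {d:?}"),
       }
   }
   ```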



##########
parquet/src/bin/parquet-fromcsv.rs:
##########
@@ -368,9 +376,28 @@ fn convert_csv_to_parquet(args: &Args) -> Result<(), ParquetFromCsvError> {
             &format!("Failed to open input file {:#?}", &args.input_file),
         )
     })?;
+
+    // open input file decoder
+    let input_file_decoder = match args.csv_compression {
+        Compression::UNCOMPRESSED => Box::new(input_file) as Box<dyn Read>,
+        Compression::SNAPPY => Box::new(FrameDecoder::new(input_file)) as Box<dyn Read>,
+        Compression::GZIP(_) => Box::new(GzDecoder::new(input_file)) as Box<dyn Read>,
+        Compression::BROTLI(_) => {
+            Box::new(Decompressor::new(input_file, 0)) as Box<dyn Read>
+        }
+        Compression::LZ4 => Box::new(lz4::Decoder::new(input_file).map_err(|e| {
+            ParquetFromCsvError::with_context(e, "Failed to create lz4::Decoder")
+        })?) as Box<dyn Read>,
+        Compression::ZSTD(_) => Box::new(zstd::Decoder::new(input_file).map_err(|e| {
+            ParquetFromCsvError::with_context(e, "Failed to create zstd::Decoder")
+        })?) as Box<dyn Read>,
+        // TODO: I wonder which crates should i use to decompress lzo and lz4_raw?

Review Comment:
   They are codecs that only make sense in the context of parquet's block compression.
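   For the codecs that cannot apply here, a hedged alternative to panicking is to surface an ordinary error the CLI can report. This sketch uses a plain `io::Error` rather than the PR's `ParquetFromCsvError`, whose constructor is not shown in this diff:
   ```rust
   use std::io::{Error, ErrorKind, Read};

   // Hedged sketch: LZO and LZ4_RAW exist only as Parquet block
   // compression, so a standalone CSV stream can never be decoded
   // with them; reject the request instead of aborting the process.
   fn reject_block_only_codec(name: &str) -> Result<Box<dyn Read>, Error> {
       Err(Error::new(
           ErrorKind::InvalidInput,
           format!("{name} is a Parquet block codec and cannot decode a standalone CSV stream"),
       ))
   }
   ```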



##########
parquet/src/bin/parquet-fromcsv.rs:
##########
@@ -72,20 +73,24 @@
 use std::{
     fmt::Display,
     fs::{read_to_string, File},
+    io::Read,
     path::{Path, PathBuf},
     sync::Arc,
 };
 
 use arrow_csv::ReaderBuilder;
 use arrow_schema::{ArrowError, Schema};
+use brotli::Decompressor;
 use clap::{Parser, ValueEnum};
+use flate2::read::GzDecoder;

Review Comment:
   I believe the required-features for `parquet-fromcsv` need to be updated to include the various compression codecs, or we need to make these imports gated on these features being enabled.
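   A hedged sketch of the gating this would need; the feature names (`snap`, `brotli`, `flate2`, `lz4`, `zstd`) are assumed to match the codec features in parquet's `Cargo.toml`:
   ```rust
   // Gate each codec import on its Cargo feature so the binary still
   // compiles when a codec is disabled at build time.
   #[cfg(feature = "brotli")]
   use brotli::Decompressor;
   #[cfg(feature = "flate2")]
   use flate2::read::GzDecoder;
   #[cfg(feature = "snap")]
   use snap::read::FrameDecoder;
   ```
   The other route is to list these features under the binary's `required-features` entry in `Cargo.toml`, which makes Cargo skip or refuse to build `parquet-fromcsv` unless they are enabled.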


