[GitHub] [arrow] alamb commented on a change in pull request #8009: ARROW-9790: [Rust][Parquet]: Increase test coverage in arrow_reader.rs

2020-08-30 Thread GitBox


alamb commented on a change in pull request #8009:
URL: https://github.com/apache/arrow/pull/8009#discussion_r479754464



##
File path: rust/parquet/src/arrow/arrow_reader.rs
##
@@ -296,51 +296,12 @@ mod tests {
 ";
 
 let converter = FromConverter::new();
-single_column_reader_test::<
+run_single_column_reader_tests::<
 BoolType,
 BooleanArray,
 FromConverter>, BooleanArray>,
 BoolType,
->(2, 100, 2, message_type, 15, 50, converter);
-}
-
-#[test]
-fn 
test_bool_single_column_reader_test_batch_size_divides_into_row_group_size() {

Review comment:
   The coverage of these tests was moved into two table entries here: 
https://github.com/apache/arrow/pull/8009/files#diff-4c103051156d7b901fad8d9e26104932R393





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [arrow] alamb commented on a change in pull request #8009: ARROW-9790: [Rust][Parquet]: Increase test coverage in arrow_reader.rs

2020-08-21 Thread GitBox


alamb commented on a change in pull request #8009:
URL: https://github.com/apache/arrow/pull/8009#discussion_r474756757



##
File path: rust/parquet/src/arrow/arrow_reader.rs
##
@@ -348,30 +348,101 @@ mod tests {
 ";
 
 let converter = Utf8ArrayConverter {};
-single_column_reader_test::<
+run_single_column_reader_tests::<
 ByteArrayType,
 StringArray,
 Utf8ArrayConverter,
 RandUtf8Gen,
->(2, 100, 2, message_type, 15, 50, converter);
+>(2, message_type, );
 }
 
-fn single_column_reader_test(
+/// Parameters for single_column_reader_test
+#[derive(Debug)]
+struct TestOptions {
+/// Number of row groups to write to parquet (row group size =
+/// num_rows / num_row_groups)
 num_row_groups: usize,
+/// Total number of rows
 num_rows: usize,
-rand_max: i32,
-message_type: ,
+/// Size of batches to read back
 record_batch_size: usize,
+/// Total number of batches to attempt to read.
+/// `record_batch_size` * `num_iterations` should be greater
+/// than `num_rows` to ensure the data can be read back completely
 num_iterations: usize,
-converter: C,
+}
+
+impl TestOptions {
+fn new(
+num_row_groups: usize,
+num_rows: usize,
+record_batch_size: usize,
+num_iterations: usize,
+) -> Self {
+TestOptions {
+num_row_groups,
+num_rows,
+record_batch_size,
+num_iterations,
+}
+}
+}
+
+/// Create a parquet file and then read it using
+/// `ParquetFileArrowReader` using a standard set of parameters
+/// `opts`.
+///
+/// `rand_max` represents the maximum size of value to pass to the
+/// value generator
+fn run_single_column_reader_tests(
+rand_max: i32,
+message_type: ,
+converter: ,
 ) where
 T: DataType,
 G: RandGen,
 A: PartialEq + Array + 'static,
 C: Converter>, A> + 'static,
 {
-let values: Vec> = (0..num_row_groups)
-.map(|_| G::gen_vec(rand_max, num_rows))
+let all_options = vec![
+TestOptions::new(2, 100, 15, 50),
+// batch size (5) chosen so batches fall on row group
+// boundaries (25 rows in 3 row groups --> row groups of
+// 10, 10, and 5) to test batch-refilling edge cases.
+TestOptions::new(3, 25, 5, 50),

Review comment:
   Done in 
https://github.com/apache/arrow/pull/8009/commits/f149500a86ff3c557015434b4c2f7811a63b10bb





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [arrow] alamb commented on a change in pull request #8009: ARROW-9790: [Rust][Parquet]: Increase test coverage in arrow_reader.rs

2020-08-19 Thread GitBox


alamb commented on a change in pull request #8009:
URL: https://github.com/apache/arrow/pull/8009#discussion_r473348774



##
File path: rust/parquet/src/arrow/arrow_reader.rs
##
@@ -348,30 +348,101 @@ mod tests {
 ";
 
 let converter = Utf8ArrayConverter {};
-single_column_reader_test::<
+run_single_column_reader_tests::<
 ByteArrayType,
 StringArray,
 Utf8ArrayConverter,
 RandUtf8Gen,
->(2, 100, 2, message_type, 15, 50, converter);
+>(2, message_type, );
 }
 
-fn single_column_reader_test(
+/// Parameters for single_column_reader_test
+#[derive(Debug)]
+struct TestOptions {
+/// Number of row groups to write to parquet (row group size =
+/// num_rows / num_row_groups)
 num_row_groups: usize,
+/// Total number of rows
 num_rows: usize,
-rand_max: i32,
-message_type: ,
+/// Size of batches to read back
 record_batch_size: usize,
+/// Total number of batches to attempt to read.
+/// `record_batch_size` * `num_iterations` should be greater
+/// than `num_rows` to ensure the data can be read back completely
 num_iterations: usize,
-converter: C,
+}
+
+impl TestOptions {
+fn new(
+num_row_groups: usize,
+num_rows: usize,
+record_batch_size: usize,
+num_iterations: usize,
+) -> Self {
+TestOptions {
+num_row_groups,
+num_rows,
+record_batch_size,
+num_iterations,
+}
+}
+}
+
+/// Create a parquet file and then read it using
+/// `ParquetFileArrowReader` using a standard set of parameters
+/// `opts`.
+///
+/// `rand_max` represents the maximum size of value to pass to the
+/// value generator
+fn run_single_column_reader_tests(
+rand_max: i32,
+message_type: ,
+converter: ,
 ) where
 T: DataType,
 G: RandGen,
 A: PartialEq + Array + 'static,
 C: Converter>, A> + 'static,
 {
-let values: Vec> = (0..num_row_groups)
-.map(|_| G::gen_vec(rand_max, num_rows))
+let all_options = vec![
+TestOptions::new(2, 100, 15, 50),
+// batch size (5) chosen so batches fall on row group
+// boundaries (25 rows in 3 row groups --> row groups of
+// 10, 10, and 5) to test batch-refilling edge cases.
+TestOptions::new(3, 25, 5, 50),

Review comment:
   This is a good idea -- I will do so before turning this into a full-on 
PR for review





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [arrow] alamb commented on a change in pull request #8009: ARROW-9790: [Rust][Parquet]: Increase test coverage in arrow_reader.rs

2020-08-19 Thread GitBox


alamb commented on a change in pull request #8009:
URL: https://github.com/apache/arrow/pull/8009#discussion_r473111954



##
File path: rust/parquet/src/arrow/arrow_reader.rs
##
@@ -348,30 +348,101 @@ mod tests {
 ";
 
 let converter = Utf8ArrayConverter {};
-single_column_reader_test::<
+run_single_column_reader_tests::<
 ByteArrayType,
 StringArray,
 Utf8ArrayConverter,
 RandUtf8Gen,
->(2, 100, 2, message_type, 15, 50, converter);
+>(2, message_type, );
 }
 
-fn single_column_reader_test(
+/// Parameters for single_column_reader_test
+#[derive(Debug)]
+struct TestOptions {
+/// Number of row groups to write to parquet (row group size =
+/// num_rows / num_row_groups)
 num_row_groups: usize,
+/// Total number of rows
 num_rows: usize,
-rand_max: i32,
-message_type: ,
+/// Size of batches to read back
 record_batch_size: usize,
+/// Total number of batches to attempt to read.
+/// `record_batch_size` * `num_iterations` should be greater
+/// than `num_rows` to ensure the data can be read back completely
 num_iterations: usize,
-converter: C,
+}
+
+impl TestOptions {
+fn new(
+num_row_groups: usize,
+num_rows: usize,
+record_batch_size: usize,
+num_iterations: usize,
+) -> Self {
+TestOptions {
+num_row_groups,
+num_rows,
+record_batch_size,
+num_iterations,
+}
+}
+}
+
+/// Create a parquet file and then read it using
+/// `ParquetFileArrowReader` using a standard set of parameters
+/// `opts`.
+///
+/// `rand_max` represents the maximum size of value to pass to the
+/// value generator
+fn run_single_column_reader_tests(
+rand_max: i32,
+message_type: ,
+converter: ,
 ) where
 T: DataType,
 G: RandGen,
 A: PartialEq + Array + 'static,
 C: Converter>, A> + 'static,
 {
-let values: Vec> = (0..num_row_groups)
-.map(|_| G::gen_vec(rand_max, num_rows))
+let all_options = vec![

Review comment:
   This is the basic idea -- drive the test with a table that is shared 
between bool, utf, and fixed_len_binary_array types





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org