emkornfield commented on a change in pull request #9791:
URL: https://github.com/apache/arrow/pull/9791#discussion_r601625820



##########
File path: cpp/src/parquet/file_serialize_test.cc
##########
@@ -123,16 +123,30 @@ class TestSerialize : public PrimitiveTypedTest<TestType> {
 
     for (int rg = 0; rg < num_rowgroups_; ++rg) {
       auto rg_reader = file_reader->RowGroup(rg);
-      ASSERT_EQ(num_columns_, rg_reader->metadata()->num_columns());
-      ASSERT_EQ(rows_per_rowgroup_, rg_reader->metadata()->num_rows());
+      auto rg_metadata = rg_reader->metadata();
+      ASSERT_EQ(num_columns_, rg_metadata->num_columns());
+      ASSERT_EQ(rows_per_rowgroup_, rg_metadata->num_rows());
       // Check that the specified compression was actually used.
-      ASSERT_EQ(expected_codec_type,
-                rg_reader->metadata()->ColumnChunk(0)->compression());
+      ASSERT_EQ(expected_codec_type, rg_metadata->ColumnChunk(0)->compression());
+
+      const int64_t total_byte_size = rg_metadata->total_byte_size();
+      const int64_t total_compressed_size = rg_metadata->total_compressed_size();
+      if (expected_codec_type == Compression::UNCOMPRESSED) {
+        ASSERT_EQ(total_byte_size, total_compressed_size);
+      } else {
+        ASSERT_NE(total_byte_size, total_compressed_size);

Review comment:
       nit, I guess this is technically correct, as there are some cases where compression might enlarge buffers, but maybe ASSERT_GE?
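
       For reference, a minimal sketch of one reading of this suggestion (the names mirror the locals introduced in the diff above; the direction of the comparison is my assumption, on the expectation that compressed data is normally no larger than the uncompressed data):

           if (expected_codec_type == Compression::UNCOMPRESSED) {
             ASSERT_EQ(total_byte_size, total_compressed_size);
           } else {
             // Hypothetical alternative to ASSERT_NE: allow equality but
             // require the compressed size not to exceed the uncompressed size.
             ASSERT_GE(total_byte_size, total_compressed_size);
           }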




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]

