pitrou commented on code in PR #35758:
URL: https://github.com/apache/arrow/pull/35758#discussion_r1211480269
##########
cpp/src/parquet/metadata_test.cc:
##########
@@ -160,8 +159,22 @@ TEST(Metadata, TestBuildAccess) {
ASSERT_EQ(DEFAULT_COMPRESSION_TYPE, rg1_column2->compression());
ASSERT_EQ(nrows / 2, rg1_column1->num_values());
ASSERT_EQ(nrows / 2, rg1_column2->num_values());
- ASSERT_EQ(3, rg1_column1->encodings().size());
- ASSERT_EQ(3, rg1_column2->encodings().size());
+ {
+ std::set<parquet::Encoding::type> encodings{parquet::Encoding::RLE,
+
parquet::Encoding::RLE_DICTIONARY,
+ parquet::Encoding::PLAIN};
+ auto& encoding_vec = rg1_column1->encodings();
Review Comment:
Nit: style
```suggestion
const auto& encoding_vec = rg1_column1->encodings();
```
##########
cpp/src/parquet/metadata_test.cc:
##########
@@ -160,8 +159,22 @@ TEST(Metadata, TestBuildAccess) {
ASSERT_EQ(DEFAULT_COMPRESSION_TYPE, rg1_column2->compression());
ASSERT_EQ(nrows / 2, rg1_column1->num_values());
ASSERT_EQ(nrows / 2, rg1_column2->num_values());
- ASSERT_EQ(3, rg1_column1->encodings().size());
- ASSERT_EQ(3, rg1_column2->encodings().size());
+ {
+ std::set<parquet::Encoding::type> encodings{parquet::Encoding::RLE,
+
parquet::Encoding::RLE_DICTIONARY,
+ parquet::Encoding::PLAIN};
+ auto& encoding_vec = rg1_column1->encodings();
+ ASSERT_EQ(encodings,
std::set<parquet::Encoding::type>(encoding_vec.begin(),
+
encoding_vec.end()));
+ }
+ {
+ std::set<parquet::Encoding::type> encodings{parquet::Encoding::RLE,
+
parquet::Encoding::RLE_DICTIONARY,
+ parquet::Encoding::PLAIN};
+ auto& encoding_vec = rg1_column2->encodings();
Review Comment:
```suggestion
const auto& encoding_vec = rg1_column2->encodings();
```
##########
cpp/src/parquet/metadata.cc:
##########
@@ -1462,40 +1462,50 @@ class
ColumnChunkMetaDataBuilder::ColumnChunkMetaDataBuilderImpl {
column_chunk_->meta_data.__set_total_compressed_size(compressed_size);
std::vector<format::Encoding::type> thrift_encodings;
+ std::vector<format::PageEncodingStats> thrift_encoding_stats;
+ // Add dictionary page encoding stats
if (has_dictionary) {
-
thrift_encodings.push_back(ToThrift(properties_->dictionary_index_encoding()));
- if (properties_->version() == ParquetVersion::PARQUET_1_0) {
- thrift_encodings.push_back(ToThrift(Encoding::PLAIN));
- } else {
-
thrift_encodings.push_back(ToThrift(properties_->dictionary_page_encoding()));
+ for (const auto& entry : dict_encoding_stats) {
+ format::PageEncodingStats dict_enc_stat;
+ dict_enc_stat.__set_page_type(format::PageType::DICTIONARY_PAGE);
+ // Dictionary Encoding would be PLAIN_DICTIONARY in v1 and
+ // PLAIN in v2.
+ format::Encoding::type dict_encoding = ToThrift(entry.first);
+ dict_enc_stat.__set_encoding(dict_encoding);
+ dict_enc_stat.__set_count(entry.second);
+ thrift_encoding_stats.push_back(dict_enc_stat);
+ auto iter =
Review Comment:
You could define a small helper function to avoid duplicating the lookup logic:
```c++
auto add_encoding = [&thrift_encodings](format::Encoding::type value) {
auto it = std::find(thrift_encodings.begin(), thrift_encodings.end(),
value);
if (it == thrift_encodings.end()) {
thrift_encodings.push_back(value);
}
};
```
##########
cpp/src/parquet/metadata.cc:
##########
@@ -1462,40 +1462,48 @@ class
ColumnChunkMetaDataBuilder::ColumnChunkMetaDataBuilderImpl {
column_chunk_->meta_data.__set_total_compressed_size(compressed_size);
std::vector<format::Encoding::type> thrift_encodings;
+ std::vector<format::PageEncodingStats> thrift_encoding_stats;
+ // Add dictionary page encoding stats
if (has_dictionary) {
-
thrift_encodings.push_back(ToThrift(properties_->dictionary_index_encoding()));
- if (properties_->version() == ParquetVersion::PARQUET_1_0) {
- thrift_encodings.push_back(ToThrift(Encoding::PLAIN));
- } else {
-
thrift_encodings.push_back(ToThrift(properties_->dictionary_page_encoding()));
+ for (const auto& entry : dict_encoding_stats) {
+ format::PageEncodingStats dict_enc_stat;
+ dict_enc_stat.__set_page_type(format::PageType::DICTIONARY_PAGE);
+ format::Encoding::type dict_encoding = ToThrift(entry.first);
+ dict_enc_stat.__set_encoding(dict_encoding);
+ dict_enc_stat.__set_count(entry.second);
+ thrift_encoding_stats.push_back(dict_enc_stat);
+ auto iter =
+ std::find(thrift_encodings.begin(), thrift_encodings.end(),
dict_encoding);
+ if (iter == thrift_encodings.end()) {
+ thrift_encodings.push_back(dict_encoding);
+ }
}
- } else { // Dictionary not enabled
-
thrift_encodings.push_back(ToThrift(properties_->encoding(column_->path())));
+ // Dictionary Data Page Encoding.
+
thrift_encodings.push_back(ToThrift(properties_->dictionary_page_encoding()));
}
+ // Force add encoding for RL/DL.
thrift_encodings.push_back(ToThrift(Encoding::RLE));
- // Only PLAIN encoding is supported for fallback in V1
- // TODO(majetideepak): Use user specified encoding for V2
- if (dictionary_fallback) {
- thrift_encodings.push_back(ToThrift(Encoding::PLAIN));
- }
- column_chunk_->meta_data.__set_encodings(thrift_encodings);
- std::vector<format::PageEncodingStats> thrift_encoding_stats;
- // Add dictionary page encoding stats
- for (const auto& entry : dict_encoding_stats) {
- format::PageEncodingStats dict_enc_stat;
- dict_enc_stat.__set_page_type(format::PageType::DICTIONARY_PAGE);
- dict_enc_stat.__set_encoding(ToThrift(entry.first));
- dict_enc_stat.__set_count(entry.second);
- thrift_encoding_stats.push_back(dict_enc_stat);
- }
// Add data page encoding stats
for (const auto& entry : data_encoding_stats) {
format::PageEncodingStats data_enc_stat;
data_enc_stat.__set_page_type(format::PageType::DATA_PAGE);
- data_enc_stat.__set_encoding(ToThrift(entry.first));
+ format::Encoding::type data_encoding = ToThrift(entry.first);
+ data_enc_stat.__set_encoding(data_encoding);
data_enc_stat.__set_count(entry.second);
thrift_encoding_stats.push_back(data_enc_stat);
+ if (data_encoding == format::Encoding::PLAIN_DICTIONARY &&
+ properties_->version() == ParquetVersion::PARQUET_1_0) {
+ // For Parquet V1, Dictionary Data Page and Dictionary Index Page
encodings
+ // are PLAIN_DICTIONARY, but the actual DATA is PLAIN, so force adding
PLAIN.
+ data_encoding = format::Encoding::PLAIN;
Review Comment:
I still don't understand why you're doing this. Here is what parquet-mr does
for v1 and v2 data pages:
https://github.com/apache/parquet-mr/blob/f15b9c42997e3e0af991462529942e044ace6651/parquet-column/src/main/java/org/apache/parquet/column/values/factory/DefaultV1ValuesWriterFactory.java#L41-L47
https://github.com/apache/parquet-mr/blob/f15b9c42997e3e0af991462529942e044ace6651/parquet-column/src/main/java/org/apache/parquet/column/values/factory/DefaultV2ValuesWriterFactory.java#L45-L51
##########
cpp/src/parquet/metadata_test.cc:
##########
@@ -160,8 +159,22 @@ TEST(Metadata, TestBuildAccess) {
ASSERT_EQ(DEFAULT_COMPRESSION_TYPE, rg1_column2->compression());
ASSERT_EQ(nrows / 2, rg1_column1->num_values());
ASSERT_EQ(nrows / 2, rg1_column2->num_values());
- ASSERT_EQ(3, rg1_column1->encodings().size());
- ASSERT_EQ(3, rg1_column2->encodings().size());
+ {
+ std::set<parquet::Encoding::type> encodings{parquet::Encoding::RLE,
+
parquet::Encoding::RLE_DICTIONARY,
+ parquet::Encoding::PLAIN};
+ auto& encoding_vec = rg1_column1->encodings();
+ ASSERT_EQ(encodings,
std::set<parquet::Encoding::type>(encoding_vec.begin(),
+
encoding_vec.end()));
+ }
+ {
+ std::set<parquet::Encoding::type> encodings{parquet::Encoding::RLE,
+
parquet::Encoding::RLE_DICTIONARY,
+ parquet::Encoding::PLAIN};
+ auto& encoding_vec = rg1_column2->encodings();
+ ASSERT_EQ(encodings,
std::set<parquet::Encoding::type>(encoding_vec.begin(),
+
encoding_vec.end()));
+ }
Review Comment:
Perhaps define and use a helper function:
```c++
void AssertEncodings(const ColumnChunkMetaData&, const
std::set<parquet::Encoding::type>& expected);
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]