wgtmac commented on code in PR #35825:
URL: https://github.com/apache/arrow/pull/35825#discussion_r1231770730
##########
cpp/src/parquet/column_reader.cc:
##########
@@ -2093,15 +2093,45 @@ class FLBARecordReader : public TypedRecordReader<FLBAType>,
std::unique_ptr<::arrow::FixedSizeBinaryBuilder> builder_;
};
-class ByteArrayChunkedRecordReader : public TypedRecordReader<ByteArrayType>,
- virtual public BinaryRecordReader {
+// TODO Below concept could be used to simplify type assertion,
+// but it requires c++20
+// template <typename T>
+// concept ByteArrayTypeConcept = std::is_same<T, ByteArrayType>::value ||
+// std::is_same<T, LargeByteArrayType>::value;
Review Comment:
```suggestion
// TODO: Below concept could be used to simplify type assertion in C++20.
// template <typename T>
// concept ByteArrayTypeConcept = std::is_same<T, ByteArrayType>::value ||
// std::is_same<T, LargeByteArrayType>::value;
```
##########
cpp/src/parquet/arrow/reader_internal.cc:
##########
@@ -487,8 +487,9 @@ Status TransferBinary(RecordReader* reader, MemoryPool* pool,
auto chunks = binary_reader->GetBuilderChunks();
for (auto& chunk : chunks) {
if (!chunk->type()->Equals(*logical_type_field->type())) {
- // XXX: if a LargeBinary chunk is larger than 2GB, the MSBs of offsets
- // will be lost because they are first created as int32 and then cast to int64.
+ // XXX: if a LargeBinary chunk is larger than 2GB and use_large_binary_variants
Review Comment:
`XXX` should be removed...
##########
cpp/src/parquet/encoding.cc:
##########
@@ -1484,6 +1521,39 @@ class DictDecoderImpl : public DecoderImpl, virtual public DictDecoder<Type> {
// Perform type-specific initiatialization
void SetDict(TypedDecoder<Type>* dictionary) override;
+ template <typename T = Type,
+ typename = std::enable_if_t<std::is_same_v<T, ByteArrayType> ||
+ std::is_same_v<T, LargeByteArrayType>>>
+ void SetByteArrayDict(TypedDecoder<Type>* dictionary) {
+ DecodeDict(dictionary);
+
+ auto dict_values = reinterpret_cast<ByteArray*>(dictionary_->mutable_data());
+
+ using offset_type = typename EncodingTraits<Type>::ArrowType::offset_type;
+
+ offset_type total_size = 0;
+ for (int i = 0; i < dictionary_length_; ++i) {
+ total_size += dict_values[i].len;
Review Comment:
Should we use `internal::AddWithOverflow` just in case?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]