mapleFU commented on code in PR #33776:
URL: https://github.com/apache/arrow/pull/33776#discussion_r1092009795
##########
cpp/src/parquet/bloom_filter.cc:
##########
@@ -65,51 +69,109 @@ void BlockSplitBloomFilter::Init(const uint8_t* bitset,
uint32_t num_bytes) {
PARQUET_ASSIGN_OR_THROW(data_, ::arrow::AllocateBuffer(num_bytes_, pool_));
memcpy(data_->mutable_data(), bitset, num_bytes_);
- this->hasher_.reset(new MurmurHash3());
+ this->hasher_ = std::make_unique<XxHasher>();
}
-BlockSplitBloomFilter BlockSplitBloomFilter::Deserialize(ArrowInputStream*
input) {
- uint32_t len, hash, algorithm;
- int64_t bytes_available;
+static constexpr uint32_t kBloomFilterHeaderSizeGuess = 32;
+static constexpr uint32_t kMaxBloomFilterHeaderSize = 1024;
- PARQUET_ASSIGN_OR_THROW(bytes_available, input->Read(sizeof(uint32_t),
&len));
- if (static_cast<uint32_t>(bytes_available) != sizeof(uint32_t)) {
- throw ParquetException("Failed to deserialize from input stream");
+static ::arrow::Status ValidateBloomFilterHeader(
+ const format::BloomFilterHeader& header) {
+ if (!header.algorithm.__isset.BLOCK) {
+ std::stringstream ss;
+ ss << "Unsupported Bloom filter algorithm: " << header.algorithm << ".";
+ return ::arrow::Status::Invalid(ss.str());
}
- PARQUET_ASSIGN_OR_THROW(bytes_available, input->Read(sizeof(uint32_t),
&hash));
- if (static_cast<uint32_t>(bytes_available) != sizeof(uint32_t)) {
- throw ParquetException("Failed to deserialize from input stream");
+ if (!header.hash.__isset.XXHASH) {
+ std::stringstream ss;
+ ss << "Unsupported Bloom filter hash: " << header.hash << ".";
+ return ::arrow::Status::Invalid(ss.str());
}
- if (static_cast<HashStrategy>(hash) != HashStrategy::MURMUR3_X64_128) {
- throw ParquetException("Unsupported hash strategy");
+
+ if (!header.compression.__isset.UNCOMPRESSED) {
+ std::stringstream ss;
+ ss << "Unsupported Bloom filter compression: " << header.compression <<
".";
+ return ::arrow::Status::Invalid(ss.str());
}
- PARQUET_ASSIGN_OR_THROW(bytes_available, input->Read(sizeof(uint32_t),
&algorithm));
- if (static_cast<uint32_t>(bytes_available) != sizeof(uint32_t)) {
- throw ParquetException("Failed to deserialize from input stream");
+ if (header.numBytes <= 0 ||
+ static_cast<uint32_t>(header.numBytes) >
BloomFilter::kMaximumBloomFilterBytes) {
+ std::stringstream ss;
+ ss << "Bloom filter size is incorrect: " << header.numBytes << ". Must be
in range ("
+ << 0 << ", " << BloomFilter::kMaximumBloomFilterBytes << "].";
+ return ::arrow::Status::Invalid(ss.str());
}
- if (static_cast<Algorithm>(algorithm) != BloomFilter::Algorithm::BLOCK) {
- throw ParquetException("Unsupported Bloom filter algorithm");
+
+ return ::arrow::Status::OK();
+}
+
+BlockSplitBloomFilter BlockSplitBloomFilter::Deserialize(
+ const ReaderProperties& properties, ArrowInputStream* input) {
+ uint32_t length = kBloomFilterHeaderSizeGuess;
+ uint32_t header_size = 0;
+
+ ThriftDeserializer deserializer(properties);
+ format::BloomFilterHeader header;
+
+ // Read and deserialize bloom filter header
+ while (true) {
+ PARQUET_ASSIGN_OR_THROW(auto sv, input->Peek(length));
Review Comment:
I think `SerializedPageReader` already uses `Peek`, so at least within Parquet,
using `Peek` here is fine as well.
##########
cpp/src/parquet/bloom_filter.cc:
##########
@@ -65,51 +69,109 @@ void BlockSplitBloomFilter::Init(const uint8_t* bitset,
uint32_t num_bytes) {
PARQUET_ASSIGN_OR_THROW(data_, ::arrow::AllocateBuffer(num_bytes_, pool_));
memcpy(data_->mutable_data(), bitset, num_bytes_);
- this->hasher_.reset(new MurmurHash3());
+ this->hasher_ = std::make_unique<XxHasher>();
}
-BlockSplitBloomFilter BlockSplitBloomFilter::Deserialize(ArrowInputStream*
input) {
- uint32_t len, hash, algorithm;
- int64_t bytes_available;
+static constexpr uint32_t kBloomFilterHeaderSizeGuess = 32;
+static constexpr uint32_t kMaxBloomFilterHeaderSize = 1024;
Review Comment:
I think 32 bytes should be enough as an initial header-size guess for a long time...
##########
cpp/src/parquet/bloom_filter.cc:
##########
@@ -65,51 +69,109 @@ void BlockSplitBloomFilter::Init(const uint8_t* bitset,
uint32_t num_bytes) {
PARQUET_ASSIGN_OR_THROW(data_, ::arrow::AllocateBuffer(num_bytes_, pool_));
memcpy(data_->mutable_data(), bitset, num_bytes_);
- this->hasher_.reset(new MurmurHash3());
+ this->hasher_ = std::make_unique<XxHasher>();
}
-BlockSplitBloomFilter BlockSplitBloomFilter::Deserialize(ArrowInputStream*
input) {
- uint32_t len, hash, algorithm;
- int64_t bytes_available;
+static constexpr uint32_t kBloomFilterHeaderSizeGuess = 32;
+static constexpr uint32_t kMaxBloomFilterHeaderSize = 1024;
- PARQUET_ASSIGN_OR_THROW(bytes_available, input->Read(sizeof(uint32_t),
&len));
- if (static_cast<uint32_t>(bytes_available) != sizeof(uint32_t)) {
- throw ParquetException("Failed to deserialize from input stream");
+static ::arrow::Status ValidateBloomFilterHeader(
+ const format::BloomFilterHeader& header) {
+ if (!header.algorithm.__isset.BLOCK) {
+ std::stringstream ss;
+ ss << "Unsupported Bloom filter algorithm: " << header.algorithm << ".";
+ return ::arrow::Status::Invalid(ss.str());
}
- PARQUET_ASSIGN_OR_THROW(bytes_available, input->Read(sizeof(uint32_t),
&hash));
- if (static_cast<uint32_t>(bytes_available) != sizeof(uint32_t)) {
- throw ParquetException("Failed to deserialize from input stream");
+ if (!header.hash.__isset.XXHASH) {
+ std::stringstream ss;
+ ss << "Unsupported Bloom filter hash: " << header.hash << ".";
+ return ::arrow::Status::Invalid(ss.str());
Review Comment:
Yeah, I added this myself but forgot about it, lol...
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]