etseidl commented on code in PR #8524:
URL: https://github.com/apache/arrow-rs/pull/8524#discussion_r2400243654
##########
parquet/src/column/writer/encoder.rs:
##########
@@ -145,16 +152,20 @@ impl<T: DataType> ColumnValueEncoderImpl<T> {
fn write_slice(&mut self, slice: &[T::T]) -> Result<()> {
if self.statistics_enabled != EnabledStatistics::None
- // INTERVAL has undefined sort order, so don't write min/max stats
for it
+ // INTERVAL, Geometry, and Geography have undefined sort order, so
don't write min/max stats for them
&& self.descr.converted_type() != ConvertedType::INTERVAL
{
- if let Some((min, max)) = self.min_max(slice, None) {
- update_min(&self.descr, &min, &mut self.min_value);
- update_max(&self.descr, &max, &mut self.max_value);
- }
+ if let Some(accumulator) = self.geo_stats_accumulator.as_mut() {
+ update_geo_stats_accumulator(accumulator.as_mut(),
slice.iter());
+ } else {
+ if let Some((min, max)) = self.min_max(slice, None) {
+ update_min(&self.descr, &min, &mut self.min_value);
+ update_max(&self.descr, &max, &mut self.max_value);
+ }
- if let Some(var_bytes) = T::T::variable_length_bytes(slice) {
- *self.variable_length_bytes.get_or_insert(0) += var_bytes;
+ if let Some(var_bytes) = T::T::variable_length_bytes(slice) {
+ *self.variable_length_bytes.get_or_insert(0) += var_bytes;
+ }
Review Comment:
I think this should execute regardless of whether geo stats are enabled. The
`variable_length_bytes` are ultimately written to the `SizeStatistics`, which
are useful even without min/max statistics.
##########
parquet/tests/geospatial.rs:
##########
@@ -0,0 +1,184 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#[cfg(all(feature = "arrow", feature = "geospatial"))]
+mod test {
+ use std::sync::Arc;
+
+ use arrow_array::{ArrayRef, BinaryArray, RecordBatch};
+ use arrow_schema::{DataType, Field, Schema};
+ use bytes::Bytes;
+ use parquet::{
+ arrow::{arrow_writer::ArrowWriterOptions, ArrowWriter},
+ basic::LogicalType,
+ data_type::{ByteArray, ByteArrayType},
+ file::{
+ properties::{EnabledStatistics, WriterProperties},
+ reader::{FileReader, SerializedFileReader},
+ writer::SerializedFileWriter,
+ },
+ geospatial::{bounding_box::BoundingBox,
statistics::GeospatialStatistics},
+ schema::types::{SchemaDescriptor, Type},
+ };
+
+ fn read_geo_statistics(buf: Vec<u8>) -> Vec<Option<GeospatialStatistics>> {
+ let b = Bytes::from(buf);
+ let reader = SerializedFileReader::new(b).unwrap();
+ reader
+ .metadata()
+ .row_groups()
+ .iter()
+ .map(|row_group| row_group.column(0).geo_statistics().cloned())
+ .collect()
+ }
+
+ #[test]
+ fn test_write_statistics_arrow() {
+ let arrow_schema = Arc::new(Schema::new(vec![Field::new(
+ "geom",
+ DataType::Binary,
+ true,
+ )]));
+ let batch = RecordBatch::try_new(
+ arrow_schema.clone(),
+ vec![wkb_array_xy([(1.0, 2.0), (11.0, 12.0)])],
+ )
+ .unwrap();
+ let expected_geometry_types = vec![1];
+ let expected_bounding_box = BoundingBox::new(1.0, 11.0, 2.0, 12.0);
+
+ let root = Type::group_type_builder("root")
+ .with_fields(vec![Type::primitive_type_builder(
+ "geo",
+ parquet::basic::Type::BYTE_ARRAY,
+ )
+ .with_logical_type(Some(LogicalType::Geometry))
+ .build()
+ .unwrap()
+ .into()])
+ .build()
+ .unwrap();
+ let schema = SchemaDescriptor::new(root.into());
+
+ let props = WriterProperties::builder()
+ .set_statistics_enabled(EnabledStatistics::Chunk)
+ .build();
+ let options = ArrowWriterOptions::new()
+ .with_parquet_schema(schema)
+ .with_properties(props);
+
+ let mut buf = Vec::with_capacity(1024);
+ let mut file_writer =
+ ArrowWriter::try_new_with_options(&mut buf, arrow_schema.clone(),
options).unwrap();
+ file_writer.write(&batch).unwrap();
+
+ let thrift_metadata = file_writer.finish().unwrap();
+ drop(file_writer);
+
+ // Check that statistics exist in thrift output
+ thrift_metadata.row_groups[0].columns[0]
Review Comment:
Heads up that when the thrift stuff merges, this will no longer be a
`format::FileMetaData` but `file::metadata::ParquetMetaData`.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]