etseidl commented on code in PR #9163:
URL: https://github.com/apache/arrow-rs/pull/9163#discussion_r2687615260
##########
parquet/src/arrow/arrow_reader/mod.rs:
##########
@@ -546,25 +546,26 @@ impl ArrowReaderOptions {
/// the dictionary encoding by specifying a `Dictionary` type in the
schema hint:
///
/// ```
- /// use std::sync::Arc;
- /// use tempfile::tempfile;
- /// use arrow_array::{ArrayRef, RecordBatch, StringArray};
- /// use arrow_schema::{DataType, Field, Schema};
- /// use parquet::arrow::arrow_reader::{ArrowReaderOptions,
ParquetRecordBatchReaderBuilder};
- /// use parquet::arrow::ArrowWriter;
- ///
- /// // Write a Parquet file with string data
- /// let file = tempfile().unwrap();
- /// let schema = Arc::new(Schema::new(vec![
- /// Field::new("city", DataType::Utf8, false)
- /// ]));
- /// let cities = StringArray::from(vec!["Berlin", "Berlin", "Paris",
"Berlin", "Paris"]);
- /// let batch = RecordBatch::try_new(schema.clone(),
vec![Arc::new(cities)]).unwrap();
- ///
- /// let mut writer = ArrowWriter::try_new(file.try_clone().unwrap(),
batch.schema(), None).unwrap();
- /// writer.write(&batch).unwrap();
- /// writer.close().unwrap();
- ///
+ /// # use std::sync::Arc;
+ /// # use bytes::Bytes;
+ /// # use arrow_array::{ArrayRef, RecordBatch, StringArray};
+ /// # use arrow_schema::{DataType, Field, Schema};
+ /// # use parquet::arrow::arrow_reader::{ArrowReaderOptions,
ParquetRecordBatchReaderBuilder};
+ /// # use parquet::arrow::ArrowWriter;
+ /// #
+ /// # // Write a Parquet file with string data
Review Comment:
Same comment here
##########
parquet/src/arrow/arrow_reader/mod.rs:
##########
@@ -506,31 +506,31 @@ impl ArrowReaderOptions {
///
/// # Example
/// ```
- /// use std::io::Bytes;
- /// use std::sync::Arc;
- /// use tempfile::tempfile;
- /// use arrow_array::{ArrayRef, Int32Array, RecordBatch};
- /// use arrow_schema::{DataType, Field, Schema, TimeUnit};
- /// use parquet::arrow::arrow_reader::{ArrowReaderOptions,
ParquetRecordBatchReaderBuilder};
- /// use parquet::arrow::ArrowWriter;
- ///
- /// // Write data - schema is inferred from the data to be Int32
- /// let file = tempfile().unwrap();
- /// let batch = RecordBatch::try_from_iter(vec![
- /// ("col_1", Arc::new(Int32Array::from(vec![1, 2, 3])) as ArrayRef),
- /// ]).unwrap();
- /// let mut writer = ArrowWriter::try_new(file.try_clone().unwrap(),
batch.schema(), None).unwrap();
- /// writer.write(&batch).unwrap();
- /// writer.close().unwrap();
- ///
+ /// # use std::sync::Arc;
+ /// # use bytes::Bytes;
+ /// # use arrow_array::{ArrayRef, Int32Array, RecordBatch};
+ /// # use arrow_schema::{DataType, Field, Schema, TimeUnit};
+ /// # use parquet::arrow::arrow_reader::{ArrowReaderOptions,
ParquetRecordBatchReaderBuilder};
+ /// # use parquet::arrow::ArrowWriter;
+ /// #
+ /// # // Write data - schema is inferred from the data to be Int32
+ /// # let mut file = Vec::new();
+ /// # let batch = RecordBatch::try_from_iter(vec![
+ /// # ("col_1", Arc::new(Int32Array::from(vec![1, 2, 3])) as ArrayRef),
+ /// # ]).unwrap();
+ /// # let mut writer = ArrowWriter::try_new(&mut file, batch.schema(),
None).unwrap();
+ /// # writer.write(&batch).unwrap();
+ /// # writer.close().unwrap();
+ /// # let file = Bytes::from(file);
+ /// #
Review Comment:
I think this should still be visible in the docs... I find the rendered docs
too terse now. The examples benefit from showing readers what is in the file
being read.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]