This is an automated email from the ASF dual-hosted git repository.

apitrou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/main by this push:
     new 3fe598ae4d GH-39666: [C++] Ensure CSV and JSON benchmarks present a 
bytes/s or items/s metric (#39764)
3fe598ae4d is described below

commit 3fe598ae4dfd7805ab05452dd5ed4b0d6c97d8d5
Author: Antoine Pitrou <[email protected]>
AuthorDate: Tue Jan 23 18:31:40 2024 +0100

    GH-39666: [C++] Ensure CSV and JSON benchmarks present a bytes/s or items/s 
metric (#39764)
    
    ### Rationale for this change
    
    Some of our microbenchmarks only present an iteration time in 
(nano,micro...)seconds. That is usually tedious to read and difficult to 
interpret.
    
    ### What changes are included in this PR?
    
    Ensure that benchmarks present an items/second and/or a bytes/second 
metric where that makes sense.
    
    ### Are these changes tested?
    
    Manually.
    
    ### Are there any user-facing changes?
    
    No.
    * Closes: #39666
    
    Authored-by: Antoine Pitrou <[email protected]>
    Signed-off-by: Antoine Pitrou <[email protected]>
---
 cpp/src/arrow/csv/writer_benchmark.cc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/cpp/src/arrow/csv/writer_benchmark.cc 
b/cpp/src/arrow/csv/writer_benchmark.cc
index 54c0f50613..9baa00d48a 100644
--- a/cpp/src/arrow/csv/writer_benchmark.cc
+++ b/cpp/src/arrow/csv/writer_benchmark.cc
@@ -97,7 +97,7 @@ void BenchmarkWriteCsv(benchmark::State& state, const 
WriteOptions& options,
                        const RecordBatch& batch) {
   int64_t total_size = 0;
 
-  while (state.KeepRunning()) {
+  for (auto _ : state) {
     auto out = io::BufferOutputStream::Create().ValueOrDie();
     ABORT_NOT_OK(WriteCSV(batch, options, out.get()));
     auto buffer = out->Finish().ValueOrDie();
@@ -106,6 +106,7 @@ void BenchmarkWriteCsv(benchmark::State& state, const 
WriteOptions& options,
 
   // byte size of the generated csv dataset
   state.SetBytesProcessed(total_size);
+  state.SetItemsProcessed(state.iterations() * batch.num_columns() * 
batch.num_rows());
   state.counters["null_percent"] = static_cast<double>(state.range(0));
 }
 

Reply via email to