Hello community,

here is the log from the commit of package benchmark for openSUSE:Factory 
checked in at 2018-05-29 16:52:29
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/benchmark (Old)
 and      /work/SRC/openSUSE:Factory/.benchmark.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "benchmark"

Tue May 29 16:52:29 2018 rev:8 rq:612786 version:1.4.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/benchmark/benchmark.changes      2018-04-05 
15:35:17.588235972 +0200
+++ /work/SRC/openSUSE:Factory/.benchmark.new/benchmark.changes 2018-05-29 
16:52:29.919324382 +0200
@@ -1,0 +2,12 @@
+Mon May 28 21:43:24 UTC 2018 - [email protected]
+
+- Update to version 1.4.1
+  * Realign expectation that State::iterations() returns 0 before
+    the main benchmark loop begins.
+  * CMake error message fixes
+  * Emscripten check fix
+  * Bazel pthread linking
+  * Negative regexes
+  * gmock fix
+
+-------------------------------------------------------------------

Old:
----
  benchmark-1.4.0.tar.gz

New:
----
  benchmark-1.4.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ benchmark.spec ++++++
--- /var/tmp/diff_new_pack.MRddKv/_old  2018-05-29 16:52:30.587299805 +0200
+++ /var/tmp/diff_new_pack.MRddKv/_new  2018-05-29 16:52:30.595299511 +0200
@@ -19,12 +19,12 @@
 %define soname  lib%{name}
 %define sover   0
 Name:           benchmark
-Version:        1.4.0
+Version:        1.4.1
 Release:        0
 Summary:        A microbenchmark support library
 License:        Apache-2.0
 Group:          Development/Libraries/C and C++
-Url:            https://github.com/google/benchmark
+URL:            https://github.com/google/benchmark
 Source:         
https://github.com/google/benchmark/archive/v%{version}.tar.gz#/%{name}-%{version}.tar.gz
 BuildRequires:  cmake
 BuildRequires:  gcc-c++
@@ -64,6 +64,8 @@
 
 %install
 %cmake_install
+# don't ship debug stuff
+rm -rf %{_libexecdir}/debug
 
 %check
 # path needs to be exported otherwise unit tests will fail
@@ -77,15 +79,17 @@
 %license LICENSE
 %doc README.md AUTHORS
 %{_libdir}/%{soname}.so.%{sover}*
+%{_libdir}/%{soname}_main.so.%{sover}*
 
 %files devel
 %license LICENSE
 %doc README.md AUTHORS
-%dir %{_prefix}/lib/cmake/
-%dir %{_prefix}/lib/cmake/%{name}
-%{_prefix}/lib/cmake/%{name}/*.cmake
-%{_prefix}/lib/pkgconfig/%{name}.pc
+%dir %{_libexecdir}/cmake/
+%dir %{_libexecdir}/cmake/%{name}
+%{_libexecdir}/cmake/%{name}/*.cmake
+%{_libexecdir}/pkgconfig/%{name}.pc
 %{_libdir}/%{soname}.so
+%{_libdir}/%{soname}_main.so
 %{_includedir}/%{name}
 
 %changelog

++++++ benchmark-1.4.0.tar.gz -> benchmark-1.4.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/AUTHORS new/benchmark-1.4.1/AUTHORS
--- old/benchmark-1.4.0/AUTHORS 2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/AUTHORS 2018-05-25 12:18:58.000000000 +0200
@@ -13,6 +13,7 @@
 Carto
 Christopher Seymour <[email protected]>
 David Coeurjolly <[email protected]>
+Deniz Evrenci <[email protected]>
 Dirac Research 
 Dominik Czarnota <[email protected]>
 Eric Fiselier <[email protected]>
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/BUILD.bazel 
new/benchmark-1.4.1/BUILD.bazel
--- old/benchmark-1.4.0/BUILD.bazel     2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/BUILD.bazel     2018-05-25 12:18:58.000000000 +0200
@@ -1,14 +1,38 @@
 licenses(["notice"])
 
+config_setting(
+    name = "windows",
+    values = {
+        "cpu": "x64_windows",
+    },
+    visibility = [":__subpackages__"],
+)
+
 cc_library(
     name = "benchmark",
-    srcs = glob([
-        "src/*.cc",
-        "src/*.h",
-    ]),
+    srcs = glob(
+        [
+            "src/*.cc",
+            "src/*.h",
+        ],
+        exclude = ["src/benchmark_main.cc"],
+    ),
+    hdrs = ["include/benchmark/benchmark.h"],
+    linkopts = select({
+        ":windows": ["-DEFAULTLIB:shlwapi.lib"],
+        "//conditions:default": ["-pthread"],
+    }),
+    strip_include_prefix = "include",
+    visibility = ["//visibility:public"],
+)
+
+cc_library(
+    name = "benchmark_main",
+    srcs = ["src/benchmark_main.cc"],
     hdrs = ["include/benchmark/benchmark.h"],
     strip_include_prefix = "include",
     visibility = ["//visibility:public"],
+    deps = [":benchmark"],
 )
 
 cc_library(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/CMakeLists.txt 
new/benchmark-1.4.1/CMakeLists.txt
--- old/benchmark-1.4.0/CMakeLists.txt  2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/CMakeLists.txt  2018-05-25 12:18:58.000000000 +0200
@@ -216,7 +216,7 @@
     # linker flags appear before all linker inputs and -lc++ must appear after.
     list(APPEND BENCHMARK_CXX_LIBRARIES c++)
   else()
-    message(FATAL "-DBENCHMARK_USE_LIBCXX:BOOL=ON is not supported for 
compiler")
+    message(FATAL_ERROR "-DBENCHMARK_USE_LIBCXX:BOOL=ON is not supported for 
compiler")
   endif()
 endif(BENCHMARK_USE_LIBCXX)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/CONTRIBUTORS 
new/benchmark-1.4.1/CONTRIBUTORS
--- old/benchmark-1.4.0/CONTRIBUTORS    2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/CONTRIBUTORS    2018-05-25 12:18:58.000000000 +0200
@@ -28,6 +28,7 @@
 Chris Kennelly <[email protected]> <[email protected]>
 Christopher Seymour <[email protected]>
 David Coeurjolly <[email protected]>
+Deniz Evrenci <[email protected]>
 Dominic Hamon <[email protected]> <[email protected]>
 Dominik Czarnota <[email protected]>
 Eric Fiselier <[email protected]>
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/README.md 
new/benchmark-1.4.1/README.md
--- old/benchmark-1.4.0/README.md       2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/README.md       2018-05-25 12:18:58.000000000 +0200
@@ -23,7 +23,7 @@
 
 ```bash
 $ git clone https://github.com/google/benchmark.git
-# Benchmark requires GTest as a dependency. Add the source tree as a 
subdirectory.
+# Benchmark requires Google Test as a dependency. Add the source tree as a 
subdirectory.
 $ git clone https://github.com/google/googletest.git benchmark/googletest
 $ mkdir build && cd build
 $ cmake -G <generator> [options] ../benchmark
@@ -31,15 +31,13 @@
 $ make
 ```
 
-Note that Google Benchmark requires GTest to build and run the tests. This
-dependency can be provided three ways:
+Note that Google Benchmark requires Google Test to build and run the tests. 
This
+dependency can be provided two ways:
 
-* Checkout the GTest sources into `benchmark/googletest`.
+* Checkout the Google Test sources into `benchmark/googletest` as above.
 * Otherwise, if `-DBENCHMARK_DOWNLOAD_DEPENDENCIES=ON` is specified during
   configuration, the library will automatically download and build any required
   dependencies.
-* Otherwise, if nothing is done, CMake will use `find_package(GTest REQUIRED)`
-  to resolve the required GTest dependency.
 
 If you do not wish to build and run the tests, add 
`-DBENCHMARK_ENABLE_GTEST_TESTS=OFF`
 to `CMAKE_ARGS`.
@@ -61,6 +59,7 @@
 ```
 git clone https://github.com/google/benchmark.git
 cd benchmark
+git clone https://github.com/google/googletest.git
 mkdir build
 cd build
 cmake .. -DCMAKE_BUILD_TYPE=RELEASE
@@ -73,7 +72,7 @@
 sudo make install
 ```
 
-Now you have google/benchmark installed in your machine 
+Now you have google/benchmark installed in your machine
 Note: Don't forget to link to pthread library while building
 
 ## Stable and Experimental Library Versions
@@ -88,6 +87,11 @@
 this branch. However, this branch provides no stability guarantees and reserves
 the right to change and break the API at any time.
 
+##Prerequisite knowledge
+
+Before attempting to understand this framework one should ideally have some 
familiarity with the structure and format of the Google Test framework, upon 
which it is based. Documentation for Google Test, including a "Getting Started" 
(primer) guide, is available here:
+https://github.com/google/googletest/blob/master/googletest/docs/Documentation.md
+
 
 ## Example usage
 ### Basic usage
@@ -114,7 +118,10 @@
 BENCHMARK_MAIN();
 ```
 
-Don't forget to inform your linker to add benchmark library e.g. through 
`-lbenchmark` compilation flag.
+Don't forget to inform your linker to add benchmark library e.g. through 
+`-lbenchmark` compilation flag. Alternatively, you may leave out the 
+`BENCHMARK_MAIN();` at the end of the source file and link against 
+`-lbenchmark_main` to get the same default behavior.
 
 The benchmark library will reporting the timing for the code within the 
`for(...)` loop.
 
@@ -933,7 +940,7 @@
 
 # Known Issues
 
-### Windows
+### Windows with CMake
 
 * Users must manually link `shlwapi.lib`. Failure to do so may result
 in unresolved symbols.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/WORKSPACE 
new/benchmark-1.4.1/WORKSPACE
--- old/benchmark-1.4.0/WORKSPACE       2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/WORKSPACE       2018-05-25 12:18:58.000000000 +0200
@@ -1,9 +1,7 @@
 workspace(name = "com_github_google_benchmark")
 
-load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
-
-git_repository(
-    name = "com_google_googletest",
-    commit = "3f0cf6b62ad1eb50d8736538363d3580dd640c3e",  # HEAD
-    remote = "https://github.com/google/googletest";,
+http_archive(
+     name = "com_google_googletest",
+     urls = 
["https://github.com/google/googletest/archive/3f0cf6b62ad1eb50d8736538363d3580dd640c3e.zip";],
+     strip_prefix = "googletest-3f0cf6b62ad1eb50d8736538363d3580dd640c3e",
 )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/cmake/CXXFeatureCheck.cmake 
new/benchmark-1.4.1/cmake/CXXFeatureCheck.cmake
--- old/benchmark-1.4.0/cmake/CXXFeatureCheck.cmake     2018-04-04 
00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/cmake/CXXFeatureCheck.cmake     2018-05-25 
12:18:58.000000000 +0200
@@ -27,25 +27,27 @@
     return()
   endif()
 
-  message("-- Performing Test ${FEATURE}")
-  if(CMAKE_CROSSCOMPILING)
-    try_compile(COMPILE_${FEATURE}
-            ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp
-            CMAKE_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS}
-            LINK_LIBRARIES ${BENCHMARK_CXX_LIBRARIES})
-    if(COMPILE_${FEATURE})
-      message(WARNING
-            "If you see build failures due to cross compilation, try setting 
HAVE_${VAR} to 0")
-      set(RUN_${FEATURE} 0)
+  if (NOT DEFINED COMPILE_${FEATURE})
+    message("-- Performing Test ${FEATURE}")
+    if(CMAKE_CROSSCOMPILING)
+      try_compile(COMPILE_${FEATURE}
+              ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp
+              CMAKE_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS}
+              LINK_LIBRARIES ${BENCHMARK_CXX_LIBRARIES})
+      if(COMPILE_${FEATURE})
+        message(WARNING
+              "If you see build failures due to cross compilation, try setting 
HAVE_${VAR} to 0")
+        set(RUN_${FEATURE} 0)
+      else()
+        set(RUN_${FEATURE} 1)
+      endif()
     else()
-      set(RUN_${FEATURE} 1)
+      message("-- Performing Test ${FEATURE}")
+      try_run(RUN_${FEATURE} COMPILE_${FEATURE}
+              ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp
+              CMAKE_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS}
+              LINK_LIBRARIES ${BENCHMARK_CXX_LIBRARIES})
     endif()
-  else()
-    message("-- Performing Test ${FEATURE}")
-    try_run(RUN_${FEATURE} COMPILE_${FEATURE}
-            ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp
-            CMAKE_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS}
-            LINK_LIBRARIES ${BENCHMARK_CXX_LIBRARIES})
   endif()
 
   if(RUN_${FEATURE} EQUAL 0)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/cmake/GetGitVersion.cmake 
new/benchmark-1.4.1/cmake/GetGitVersion.cmake
--- old/benchmark-1.4.0/cmake/GetGitVersion.cmake       2018-04-04 
00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/cmake/GetGitVersion.cmake       2018-05-25 
12:18:58.000000000 +0200
@@ -21,6 +21,7 @@
 function(get_git_version var)
   if(GIT_EXECUTABLE)
       execute_process(COMMAND ${GIT_EXECUTABLE} describe --match 
"v[0-9]*.[0-9]*.[0-9]*" --abbrev=8
+          WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
           RESULT_VARIABLE status
           OUTPUT_VARIABLE GIT_VERSION
           ERROR_QUIET)
@@ -33,9 +34,11 @@
 
       # Work out if the repository is dirty
       execute_process(COMMAND ${GIT_EXECUTABLE} update-index -q --refresh
+          WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
           OUTPUT_QUIET
           ERROR_QUIET)
       execute_process(COMMAND ${GIT_EXECUTABLE} diff-index --name-only HEAD --
+          WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
           OUTPUT_VARIABLE GIT_DIFF_INDEX
           ERROR_QUIET)
       string(COMPARE NOTEQUAL "${GIT_DIFF_INDEX}" "" GIT_DIRTY)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/cmake/HandleGTest.cmake 
new/benchmark-1.4.1/cmake/HandleGTest.cmake
--- old/benchmark-1.4.0/cmake/HandleGTest.cmake 2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/cmake/HandleGTest.cmake 2018-05-25 12:18:58.000000000 
+0200
@@ -1,7 +1,5 @@
 
-macro(split_list listname)
-  string(REPLACE ";" " " ${listname} "${${listname}}")
-endmacro()
+include(split_list)
 
 macro(build_external_gtest)
   include(ExternalProject)
@@ -54,40 +52,62 @@
       )
 
   ExternalProject_Get_Property(googletest install_dir)
-
-  add_library(gtest UNKNOWN IMPORTED)
-  add_library(gtest_main UNKNOWN IMPORTED)
+  set(GTEST_INCLUDE_DIRS ${install_dir}/include)
+  file(MAKE_DIRECTORY ${GTEST_INCLUDE_DIRS})
 
   set(LIB_SUFFIX "${CMAKE_STATIC_LIBRARY_SUFFIX}")
   set(LIB_PREFIX "${CMAKE_STATIC_LIBRARY_PREFIX}")
-
   if("${GTEST_BUILD_TYPE}" STREQUAL "DEBUG")
     set(LIB_SUFFIX "d${CMAKE_STATIC_LIBRARY_SUFFIX}")
   endif()
-  file(MAKE_DIRECTORY ${install_dir}/include)
-  set_target_properties(gtest PROPERTIES
-    IMPORTED_LOCATION ${install_dir}/lib/${LIB_PREFIX}gtest${LIB_SUFFIX}
-    INTERFACE_INCLUDE_DIRECTORIES ${install_dir}/include
-  )
-  set_target_properties(gtest_main PROPERTIES
-    IMPORTED_LOCATION ${install_dir}/lib/${LIB_PREFIX}gtest_main${LIB_SUFFIX}
-    INTERFACE_INCLUDE_DIRECTORIES ${install_dir}/include
-  )
-  add_dependencies(gtest googletest)
-  add_dependencies(gtest_main googletest)
-  set(GTEST_BOTH_LIBRARIES gtest gtest_main)
-  set(GTEST_INCLUDE_DIRS ${install_dir}/include)
+
+  # Use gmock_main instead of gtest_main because it initializes gtest as well.
+  # Note: The libraries are listed in reverse order of their dependancies.
+  foreach(LIB gtest gmock gmock_main)
+    add_library(${LIB} UNKNOWN IMPORTED)
+    set_target_properties(${LIB} PROPERTIES
+      IMPORTED_LOCATION ${install_dir}/lib/${LIB_PREFIX}${LIB}${LIB_SUFFIX}
+      INTERFACE_INCLUDE_DIRECTORIES ${GTEST_INCLUDE_DIRS}
+      INTERFACE_LINK_LIBRARIES "${GTEST_BOTH_LIBRARIES}"
+    )
+    add_dependencies(${LIB} googletest)
+    list(APPEND GTEST_BOTH_LIBRARIES ${LIB})
+  endforeach()
 endmacro(build_external_gtest)
 
 if (BENCHMARK_ENABLE_GTEST_TESTS)
   if (IS_DIRECTORY ${CMAKE_SOURCE_DIR}/googletest)
+    set(GTEST_ROOT "${CMAKE_SOURCE_DIR}/googletest")
     set(INSTALL_GTEST OFF CACHE INTERNAL "")
     set(INSTALL_GMOCK OFF CACHE INTERNAL "")
     add_subdirectory(${CMAKE_SOURCE_DIR}/googletest)
-    set(GTEST_BOTH_LIBRARIES gtest gtest_main)
+    set(GTEST_BOTH_LIBRARIES gtest gmock gmock_main)
+    foreach(HEADER test mock)
+      # CMake 2.8 and older don't respect INTERFACE_INCLUDE_DIRECTORIES, so we
+      # have to add the paths ourselves.
+      set(HFILE g${HEADER}/g${HEADER}.h)
+      set(HPATH ${GTEST_ROOT}/google${HEADER}/include)
+      find_path(HEADER_PATH_${HEADER} ${HFILE}
+          NO_DEFAULT_PATHS
+          HINTS ${HPATH}
+      )
+      if (NOT HEADER_PATH_${HEADER})
+        message(FATAL_ERROR "Failed to find header ${HFILE} in ${HPATH}")
+      endif()
+      list(APPEND GTEST_INCLUDE_DIRS ${HEADER_PATH_${HEADER}})
+    endforeach()
   elseif(BENCHMARK_DOWNLOAD_DEPENDENCIES)
     build_external_gtest()
   else()
     find_package(GTest REQUIRED)
+    find_path(GMOCK_INCLUDE_DIRS gmock/gmock.h
+        HINTS ${GTEST_INCLUDE_DIRS})
+    if (NOT GMOCK_INCLUDE_DIRS)
+      message(FATAL_ERROR "Failed to find header gmock/gmock.h with hint 
${GTEST_INCLUDE_DIRS}")
+    endif()
+    set(GTEST_INCLUDE_DIRS ${GTEST_INCLUDE_DIRS} ${GMOCK_INCLUDE_DIRS})
+    # FIXME: We don't currently require the gmock library to build the tests,
+    # and it's likely we won't find it, so we don't try. As long as we've
+    # found the gmock/gmock.h header and gtest_main that should be good enough.
   endif()
 endif()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/cmake/split_list.cmake 
new/benchmark-1.4.1/cmake/split_list.cmake
--- old/benchmark-1.4.0/cmake/split_list.cmake  1970-01-01 01:00:00.000000000 
+0100
+++ new/benchmark-1.4.1/cmake/split_list.cmake  2018-05-25 12:18:58.000000000 
+0200
@@ -0,0 +1,3 @@
+macro(split_list listname)
+  string(REPLACE ";" " " ${listname} "${${listname}}")
+endmacro()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/include/benchmark/benchmark.h 
new/benchmark-1.4.1/include/benchmark/benchmark.h
--- old/benchmark-1.4.0/include/benchmark/benchmark.h   2018-04-04 
00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/include/benchmark/benchmark.h   2018-05-25 
12:18:58.000000000 +0200
@@ -292,7 +292,7 @@
 
 
 #if (!defined(__GNUC__) && !defined(__clang__)) || defined(__pnacl__) || \
-    defined(EMSCRIPTN)
+    defined(__EMSCRIPTEN__)
 # define BENCHMARK_HAS_NO_INLINE_ASSEMBLY
 #endif
 
@@ -385,7 +385,7 @@
 
 // BigOFunc is passed to a benchmark in order to specify the asymptotic
 // computational complexity for the benchmark.
-typedef double(BigOFunc)(int);
+typedef double(BigOFunc)(int64_t);
 
 // StatisticsFunc is passed to a benchmark in order to compute some descriptive
 // statistics over all the measurements of some type
@@ -575,7 +575,10 @@
 
   BENCHMARK_ALWAYS_INLINE
   size_t iterations() const {
-    return (max_iterations - total_iterations_ + batch_leftover_);
+    if (BENCHMARK_BUILTIN_EXPECT(!started_, false)) {
+      return 0;
+    }
+    return max_iterations - total_iterations_ + batch_leftover_;
   }
 
 private: // items we expect on the first cache line (ie 64 bytes of the struct)
@@ -1303,7 +1306,7 @@
     // Keep track of arguments to compute asymptotic complexity
     BigO complexity;
     BigOFunc* complexity_lambda;
-    int complexity_n;
+    int64_t complexity_n;
 
     // what statistics to compute from the measurements
     const std::vector<Statistics>* statistics;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/CMakeLists.txt 
new/benchmark-1.4.1/src/CMakeLists.txt
--- old/benchmark-1.4.0/src/CMakeLists.txt      2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/src/CMakeLists.txt      2018-05-25 12:18:58.000000000 
+0200
@@ -11,6 +11,7 @@
     *.cc
     ${PROJECT_SOURCE_DIR}/include/benchmark/*.h
     ${CMAKE_CURRENT_SOURCE_DIR}/*.h)
+list(FILTER SOURCE_FILES EXCLUDE REGEX "benchmark_main\\.cc")
 
 add_library(benchmark ${SOURCE_FILES})
 set_target_properties(benchmark PROPERTIES
@@ -39,6 +40,18 @@
   target_link_libraries(benchmark kstat)
 endif()
 
+# Benchmark main library
+add_library(benchmark_main "benchmark_main.cc")
+set_target_properties(benchmark_main PROPERTIES
+  OUTPUT_NAME "benchmark_main"
+  VERSION ${GENERIC_LIB_VERSION}
+  SOVERSION ${GENERIC_LIB_SOVERSION}
+)
+target_include_directories(benchmark PUBLIC
+    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../include>
+    )
+target_link_libraries(benchmark_main benchmark)
+
 set(include_install_dir "include")
 set(lib_install_dir "lib/")
 set(bin_install_dir "bin/")
@@ -65,7 +78,7 @@
 if (BENCHMARK_ENABLE_INSTALL)
   # Install target (will install the library to specified CMAKE_INSTALL_PREFIX 
variable)
   install(
-    TARGETS benchmark
+    TARGETS benchmark benchmark_main
     EXPORT ${targets_export_name}
     ARCHIVE DESTINATION ${lib_install_dir}
     LIBRARY DESTINATION ${lib_install_dir}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/benchmark.cc 
new/benchmark-1.4.1/src/benchmark.cc
--- old/benchmark-1.4.0/src/benchmark.cc        2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/src/benchmark.cc        2018-05-25 12:18:58.000000000 
+0200
@@ -115,7 +115,7 @@
 
 BenchmarkReporter::Run CreateRunReport(
     const benchmark::internal::Benchmark::Instance& b,
-    const internal::ThreadManager::Result& results, size_t iters,
+    const internal::ThreadManager::Result& results,
     double seconds) {
   // Create report about this benchmark run.
   BenchmarkReporter::Run report;
@@ -124,8 +124,8 @@
   report.error_occurred = results.has_error_;
   report.error_message = results.error_message_;
   report.report_label = results.report_label_;
-  // Report the total iterations across all threads.
-  report.iterations = static_cast<int64_t>(iters) * b.threads;
+  // This is the total iterations across all threads.
+  report.iterations = results.iterations;
   report.time_unit = b.time_unit;
 
   if (!report.error_occurred) {
@@ -169,6 +169,7 @@
   {
     MutexLock l(manager->GetBenchmarkMutex());
     internal::ThreadManager::Result& results = manager->results;
+    results.iterations += st.iterations();
     results.cpu_time_used += timer.cpu_time_used();
     results.real_time_used += timer.real_time_used();
     results.manual_time_used += timer.manual_time_used();
@@ -236,18 +237,17 @@
       // Determine if this run should be reported; Either it has
       // run for a sufficient amount of time or because an error was reported.
       const bool should_report =  repetition_num > 0
-        || has_explicit_iteration_count // An exact iteration count was 
requested
+        || has_explicit_iteration_count  // An exact iteration count was 
requested
         || results.has_error_
-        || iters >= kMaxIterations
-        || seconds >= min_time // the elapsed time is large enough
+        || iters >= kMaxIterations  // No chance to try again, we hit the 
limit.
+        || seconds >= min_time  // the elapsed time is large enough
         // CPU time is specified but the elapsed real time greatly exceeds the
         // minimum time. Note that user provided timers are except from this
         // sanity check.
         || ((results.real_time_used >= 5 * min_time) && !b.use_manual_time);
 
       if (should_report) {
-        BenchmarkReporter::Run report =
-            CreateRunReport(b, results, iters, seconds);
+        BenchmarkReporter::Run report = CreateRunReport(b, results, seconds);
         if (!report.error_occurred && b.complexity != oNone)
           complexity_reports->push_back(report);
         reports.push_back(report);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/benchmark_main.cc 
new/benchmark-1.4.1/src/benchmark_main.cc
--- old/benchmark-1.4.0/src/benchmark_main.cc   1970-01-01 01:00:00.000000000 
+0100
+++ new/benchmark-1.4.1/src/benchmark_main.cc   2018-05-25 12:18:58.000000000 
+0200
@@ -0,0 +1,17 @@
+// Copyright 2018 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/benchmark.h"
+
+BENCHMARK_MAIN();
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/benchmark_register.cc 
new/benchmark-1.4.1/src/benchmark_register.cc
--- old/benchmark-1.4.0/src/benchmark_register.cc       2018-04-04 
00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/src/benchmark_register.cc       2018-05-25 
12:18:58.000000000 +0200
@@ -77,7 +77,7 @@
 
   // Extract the list of benchmark instances that match the specified
   // regular expression.
-  bool FindBenchmarks(const std::string& re,
+  bool FindBenchmarks(std::string re,
                       std::vector<Benchmark::Instance>* benchmarks,
                       std::ostream* Err);
 
@@ -107,13 +107,18 @@
 }
 
 bool BenchmarkFamilies::FindBenchmarks(
-    const std::string& spec, std::vector<Benchmark::Instance>* benchmarks,
+    std::string spec, std::vector<Benchmark::Instance>* benchmarks,
     std::ostream* ErrStream) {
   CHECK(ErrStream);
   auto& Err = *ErrStream;
   // Make regular expression out of command-line flag
   std::string error_msg;
   Regex re;
+  bool isNegativeFilter = false;
+  if(spec[0] == '-') {
+      spec.replace(0, 1, "");
+      isNegativeFilter = true;
+  }
   if (!re.Init(spec, &error_msg)) {
     Err << "Could not compile benchmark re: " << error_msg << std::endl;
     return false;
@@ -199,7 +204,8 @@
           instance.name += StrFormat("/threads:%d", instance.threads);
         }
 
-        if (re.Match(instance.name)) {
+        if ((re.Match(instance.name) && !isNegativeFilter) ||
+            (!re.Match(instance.name) && isNegativeFilter)) {
           instance.last_benchmark_instance = (&args == &family->args_.back());
           benchmarks->push_back(std::move(instance));
         }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/complexity.cc 
new/benchmark-1.4.1/src/complexity.cc
--- old/benchmark-1.4.0/src/complexity.cc       2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/src/complexity.cc       2018-05-25 12:18:58.000000000 
+0200
@@ -28,18 +28,18 @@
 BigOFunc* FittingCurve(BigO complexity) {
   switch (complexity) {
     case oN:
-      return [](int n) -> double { return n; };
+      return [](int64_t n) -> double { return static_cast<double>(n); };
     case oNSquared:
-      return [](int n) -> double { return std::pow(n, 2); };
+      return [](int64_t n) -> double { return std::pow(n, 2); };
     case oNCubed:
-      return [](int n) -> double { return std::pow(n, 3); };
+      return [](int64_t n) -> double { return std::pow(n, 3); };
     case oLogN:
-      return [](int n) { return log2(n); };
+      return [](int64_t n) { return log2(n); };
     case oNLogN:
-      return [](int n) { return n * log2(n); };
+      return [](int64_t n) { return n * log2(n); };
     case o1:
     default:
-      return [](int) { return 1.0; };
+      return [](int64_t) { return 1.0; };
   }
 }
 
@@ -68,12 +68,12 @@
 // given by the lambda expression.
 //   - n             : Vector containing the size of the benchmark tests.
 //   - time          : Vector containing the times for the benchmark tests.
-//   - fitting_curve : lambda expression (e.g. [](int n) {return n; };).
+//   - fitting_curve : lambda expression (e.g. [](int64_t n) {return n; };).
 
 // For a deeper explanation on the algorithm logic, look the README file at
 // http://github.com/ismaelJimenez/Minimal-Cpp-Least-Squared-Fit
 
-LeastSq MinimalLeastSq(const std::vector<int>& n,
+LeastSq MinimalLeastSq(const std::vector<int64_t>& n,
                        const std::vector<double>& time,
                        BigOFunc* fitting_curve) {
   double sigma_gn = 0.0;
@@ -117,7 +117,7 @@
 //   - complexity : If different than oAuto, the fitting curve will stick to
 //                  this one. If it is oAuto, it will be calculated the best
 //                  fitting curve.
-LeastSq MinimalLeastSq(const std::vector<int>& n,
+LeastSq MinimalLeastSq(const std::vector<int64_t>& n,
                        const std::vector<double>& time, const BigO complexity) 
{
   CHECK_EQ(n.size(), time.size());
   CHECK_GE(n.size(), 2);  // Do not compute fitting curve is less than two
@@ -157,7 +157,7 @@
   if (reports.size() < 2) return results;
 
   // Accumulators.
-  std::vector<int> n;
+  std::vector<int64_t> n;
   std::vector<double> real_time;
   std::vector<double> cpu_time;
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/internal_macros.h 
new/benchmark-1.4.1/src/internal_macros.h
--- old/benchmark-1.4.0/src/internal_macros.h   2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/src/internal_macros.h   2018-05-25 12:18:58.000000000 
+0200
@@ -51,11 +51,13 @@
   #define BENCHMARK_OS_FREEBSD 1
 #elif defined(__NetBSD__)
   #define BENCHMARK_OS_NETBSD 1
+#elif defined(__OpenBSD__)
+  #define BENCHMARK_OS_OPENBSD 1
 #elif defined(__linux__)
   #define BENCHMARK_OS_LINUX 1
 #elif defined(__native_client__)
   #define BENCHMARK_OS_NACL 1
-#elif defined(EMSCRIPTEN)
+#elif defined(__EMSCRIPTEN__)
   #define BENCHMARK_OS_EMSCRIPTEN 1
 #elif defined(__rtems__)
   #define BENCHMARK_OS_RTEMS 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/statistics.cc 
new/benchmark-1.4.1/src/statistics.cc
--- old/benchmark-1.4.0/src/statistics.cc       2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/src/statistics.cc       2018-05-25 12:18:58.000000000 
+0200
@@ -36,16 +36,19 @@
 
 double StatisticsMedian(const std::vector<double>& v) {
   if (v.size() < 3) return StatisticsMean(v);
-  std::vector<double> partial;
-  // we need roundDown(count/2)+1 slots
-  partial.resize(1 + (v.size() / 2));
-  std::partial_sort_copy(v.begin(), v.end(), partial.begin(), partial.end());
-  // did we have odd number of samples?
-  // if yes, then the last element of partially-sorted vector is the median
-  // it no, then the average of the last two elements is the median
+  std::vector<double> copy(v);
+
+  auto center = copy.begin() + v.size() / 2;
+  std::nth_element(copy.begin(), center, copy.end());
+
+  // did we have an odd number of samples?
+  // if yes, then center is the median
+  // it no, then we are looking for the average between center and the value 
before
   if(v.size() % 2 == 1)
-    return partial.back();
-  return (partial[partial.size() - 2] + partial[partial.size() - 1]) / 2.0;
+    return *center;
+  auto center2 = copy.begin() + v.size() / 2 - 1;
+  std::nth_element(copy.begin(), center2, copy.end());
+  return (*center + *center2) / 2.0;
 }
 
 // Return the sum of the squares of this sample set
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/sysinfo.cc 
new/benchmark-1.4.1/src/sysinfo.cc
--- old/benchmark-1.4.0/src/sysinfo.cc  2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/src/sysinfo.cc  2018-05-25 12:18:58.000000000 +0200
@@ -28,7 +28,7 @@
 #include <sys/types.h>  // this header must be included before 'sys/sysctl.h' 
to avoid compilation error on FreeBSD
 #include <unistd.h>
 #if defined BENCHMARK_OS_FREEBSD || defined BENCHMARK_OS_MACOSX || \
-    defined BENCHMARK_OS_NETBSD
+    defined BENCHMARK_OS_NETBSD || defined BENCHMARK_OS_OPENBSD
 #define BENCHMARK_HAS_SYSCTL
 #include <sys/sysctl.h>
 #endif
@@ -136,6 +136,26 @@
 };
 
 ValueUnion GetSysctlImp(std::string const& Name) {
+#if defined BENCHMARK_OS_OPENBSD
+  int mib[2];
+
+  mib[0] = CTL_HW;
+  if ((Name == "hw.ncpu") || (Name == "hw.cpuspeed")){
+    ValueUnion buff(sizeof(int));
+
+    if (Name == "hw.ncpu") {
+      mib[1] = HW_NCPU;
+    } else {
+      mib[1] = HW_CPUSPEED;
+    }
+
+    if (sysctl(mib, 2, buff.data(), &buff.Size, nullptr, 0) == -1) {
+      return ValueUnion();
+    }
+    return buff;
+  }
+  return ValueUnion();
+#else
   size_t CurBuffSize = 0;
   if (sysctlbyname(Name.c_str(), nullptr, &CurBuffSize, nullptr, 0) == -1)
     return ValueUnion();
@@ -144,6 +164,7 @@
   if (sysctlbyname(Name.c_str(), buff.data(), &buff.Size, nullptr, 0) == 0)
     return buff;
   return ValueUnion();
+#endif
 }
 
 BENCHMARK_MAYBE_UNUSED
@@ -488,12 +509,17 @@
   constexpr auto* FreqStr =
 #if defined(BENCHMARK_OS_FREEBSD) || defined(BENCHMARK_OS_NETBSD)
       "machdep.tsc_freq";
+#elif defined BENCHMARK_OS_OPENBSD
+      "hw.cpuspeed";
 #else
       "hw.cpufrequency";
 #endif
   unsigned long long hz = 0;
+#if defined BENCHMARK_OS_OPENBSD
+  if (GetSysctl(FreqStr, &hz)) return hz * 1000000;
+#else
   if (GetSysctl(FreqStr, &hz)) return hz;
-
+#endif
   fprintf(stderr, "Unable to determine clock rate from sysctl: %s: %s\n",
           FreqStr, strerror(errno));
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/thread_manager.h 
new/benchmark-1.4.1/src/thread_manager.h
--- old/benchmark-1.4.0/src/thread_manager.h    2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/src/thread_manager.h    2018-05-25 12:18:58.000000000 
+0200
@@ -1,6 +1,9 @@
 #ifndef BENCHMARK_THREAD_MANAGER_H
 #define BENCHMARK_THREAD_MANAGER_H
 
+#include <atomic>
+
+#include "benchmark/benchmark.h"
 #include "mutex.h"
 
 namespace benchmark {
@@ -35,12 +38,13 @@
 
  public:
   struct Result {
+    int64_t iterations = 0;
     double real_time_used = 0;
     double cpu_time_used = 0;
     double manual_time_used = 0;
     int64_t bytes_processed = 0;
     int64_t items_processed = 0;
-    int complexity_n = 0;
+    int64_t complexity_n = 0;
     std::string report_label_;
     std::string error_message_;
     bool has_error_ = false;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/src/timers.cc 
new/benchmark-1.4.1/src/timers.cc
--- old/benchmark-1.4.0/src/timers.cc   2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/src/timers.cc   2018-05-25 12:18:58.000000000 +0200
@@ -193,7 +193,6 @@
         std::strftime(storage, sizeof(storage), "%x %X", ::localtime(&now));
 #else
     std::tm timeinfo;
-    std::memset(&timeinfo, 0, sizeof(std::tm));
     ::localtime_r(&now, &timeinfo);
     written = std::strftime(storage, sizeof(storage), "%F %T", &timeinfo);
 #endif
@@ -202,7 +201,6 @@
     written = std::strftime(storage, sizeof(storage), "%x %X", ::gmtime(&now));
 #else
     std::tm timeinfo;
-    std::memset(&timeinfo, 0, sizeof(std::tm));
     ::gmtime_r(&now, &timeinfo);
     written = std::strftime(storage, sizeof(storage), "%F %T", &timeinfo);
 #endif
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/test/AssemblyTests.cmake 
new/benchmark-1.4.1/test/AssemblyTests.cmake
--- old/benchmark-1.4.0/test/AssemblyTests.cmake        2018-04-04 
00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/test/AssemblyTests.cmake        2018-05-25 
12:18:58.000000000 +0200
@@ -1,4 +1,5 @@
 
+include(split_list)
 
 set(ASM_TEST_FLAGS "")
 check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/test/BUILD 
new/benchmark-1.4.1/test/BUILD
--- old/benchmark-1.4.0/test/BUILD      2018-04-04 00:12:47.000000000 +0200
+++ new/benchmark-1.4.1/test/BUILD      2018-05-25 12:18:58.000000000 +0200
@@ -53,5 +53,13 @@
   # FIXME: Add support for assembly tests to bazel.
   # See Issue #556
   # https://github.com/google/benchmark/issues/556
-  ) for test_src in glob(["*test.cc"], exclude = ["*_assembly_test.cc"])
+  ) for test_src in glob(["*test.cc"], exclude = ["*_assembly_test.cc", 
"link_main_test.cc"])
 ]
+
+cc_test(
+    name = "link_main_test",
+    size = "small",
+    srcs = ["link_main_test.cc"],
+    copts = TEST_COPTS,
+    deps = ["//:benchmark_main"],
+)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/test/CMakeLists.txt 
new/benchmark-1.4.1/test/CMakeLists.txt
--- old/benchmark-1.4.0/test/CMakeLists.txt     2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/test/CMakeLists.txt     2018-05-25 12:18:58.000000000 
+0200
@@ -41,6 +41,10 @@
   target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
 endmacro(compile_benchmark_test)
 
+macro(compile_benchmark_test_with_main name)
+  add_executable(${name} "${name}.cc")
+  target_link_libraries(${name} benchmark_main)
+endmacro(compile_benchmark_test_with_main)
 
 macro(compile_output_test name)
   add_executable(${name} "${name}.cc" output_test.h)
@@ -59,14 +63,23 @@
 endmacro(add_filter_test)
 
 add_filter_test(filter_simple "Foo" 3)
+add_filter_test(filter_simple_negative "-Foo" 2)
 add_filter_test(filter_suffix "BM_.*" 4)
+add_filter_test(filter_suffix_negative "-BM_.*" 1)
 add_filter_test(filter_regex_all ".*" 5)
+add_filter_test(filter_regex_all_negative "-.*" 0)
 add_filter_test(filter_regex_blank "" 5)
+add_filter_test(filter_regex_blank_negative "-" 0)
 add_filter_test(filter_regex_none "monkey" 0)
+add_filter_test(filter_regex_none_negative "-monkey" 5)
 add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
+add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
 add_filter_test(filter_regex_begin "^BM_.*" 4)
+add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
 add_filter_test(filter_regex_begin2 "^N" 1)
+add_filter_test(filter_regex_begin2_negative "-^N" 4)
 add_filter_test(filter_regex_end ".*Ba$" 1)
+add_filter_test(filter_regex_end_negative "-.*Ba$" 4)
 
 compile_benchmark_test(options_test)
 add_test(options_benchmarks options_test --benchmark_min_time=0.01)
@@ -100,6 +113,9 @@
 compile_benchmark_test(multiple_ranges_test)
 add_test(multiple_ranges_test multiple_ranges_test --benchmark_min_time=0.01)
 
+compile_benchmark_test_with_main(link_main_test)
+add_test(link_main_test link_main_test --benchmark_min_time=0.01)
+
 compile_output_test(reporter_output_test)
 add_test(reporter_output_test reporter_output_test --benchmark_min_time=0.01)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/test/basic_test.cc 
new/benchmark-1.4.1/test/basic_test.cc
--- old/benchmark-1.4.0/test/basic_test.cc      2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/test/basic_test.cc      2018-05-25 12:18:58.000000000 
+0200
@@ -99,6 +99,7 @@
 
 void BM_KeepRunning(benchmark::State& state) {
   size_t iter_count = 0;
+  assert(iter_count == state.iterations());
   while (state.KeepRunning()) {
     ++iter_count;
   }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/test/complexity_test.cc 
new/benchmark-1.4.1/test/complexity_test.cc
--- old/benchmark-1.4.0/test/complexity_test.cc 2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/test/complexity_test.cc 2018-05-25 12:18:58.000000000 
+0200
@@ -55,7 +55,7 @@
 }
 BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
 BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity();
-BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity([](int) {
+BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity([](int64_t) {
   return 1.0;
 });
 
@@ -106,7 +106,7 @@
 BENCHMARK(BM_Complexity_O_N)
     ->RangeMultiplier(2)
     ->Range(1 << 10, 1 << 16)
-    ->Complexity([](int n) -> double { return n; });
+    ->Complexity([](int64_t n) -> double { return n; });
 BENCHMARK(BM_Complexity_O_N)
     ->RangeMultiplier(2)
     ->Range(1 << 10, 1 << 16)
@@ -141,7 +141,7 @@
 BENCHMARK(BM_Complexity_O_N_log_N)
     ->RangeMultiplier(2)
     ->Range(1 << 10, 1 << 16)
-    ->Complexity([](int n) { return n * log2(n); });
+    ->Complexity([](int64_t n) { return n * log2(n); });
 BENCHMARK(BM_Complexity_O_N_log_N)
     ->RangeMultiplier(2)
     ->Range(1 << 10, 1 << 16)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/test/link_main_test.cc 
new/benchmark-1.4.1/test/link_main_test.cc
--- old/benchmark-1.4.0/test/link_main_test.cc  1970-01-01 01:00:00.000000000 
+0100
+++ new/benchmark-1.4.1/test/link_main_test.cc  2018-05-25 12:18:58.000000000 
+0200
@@ -0,0 +1,8 @@
+#include "benchmark/benchmark.h"
+
+void BM_empty(benchmark::State& state) {
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+}
+BENCHMARK(BM_empty);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/tools/compare.py 
new/benchmark-1.4.1/tools/compare.py
--- old/benchmark-1.4.0/tools/compare.py        2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/tools/compare.py        2018-05-25 12:18:58.000000000 
+0200
@@ -138,6 +138,9 @@
     # Parse the command line flags
     parser = create_parser()
     args, unknown_args = parser.parse_known_args()
+    if args.mode is None:
+      parser.print_help()
+      exit(1)
     assert not unknown_args
     benchmark_options = args.benchmark_options
 
@@ -175,6 +178,7 @@
     else:
         # should never happen
         print("Unrecognized mode of operation: '%s'" % args.mode)
+        parser.print_help()
         exit(1)
 
     check_inputs(test_baseline, test_contender, benchmark_options)
@@ -218,8 +222,8 @@
                 os.path.realpath(__file__)),
             'gbench',
             'Inputs')
-        self.testInput0 = os.path.join(testInputs, 'test_baseline_run1.json')
-        self.testInput1 = os.path.join(testInputs, 'test_baseline_run2.json')
+        self.testInput0 = os.path.join(testInputs, 'test1_run1.json')
+        self.testInput1 = os.path.join(testInputs, 'test1_run2.json')
 
     def test_benchmarks_basic(self):
         parsed = self.parser.parse_args(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/benchmark-1.4.0/tools/gbench/report.py 
new/benchmark-1.4.1/tools/gbench/report.py
--- old/benchmark-1.4.0/tools/gbench/report.py  2018-04-04 00:12:47.000000000 
+0200
+++ new/benchmark-1.4.1/tools/gbench/report.py  2018-05-25 12:18:58.000000000 
+0200
@@ -191,7 +191,7 @@
         json2 = filter_benchmark(json, "BM_O.e", ".")
         output_lines_with_header = generate_difference_report(json1, json2, 
use_color=False)
         output_lines = output_lines_with_header[2:]
-        print "\n"
+        print("\n")
         print("\n".join(output_lines_with_header))
         self.assertEqual(len(output_lines), len(expect_lines))
         for i in range(0, len(output_lines)):


Reply via email to