This is an automated email from the ASF dual-hosted git repository.
cpcloud pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git
The following commit(s) were added to refs/heads/master by this push:
new c71a565 ARROW-2117: [C++] Update codebase / CI toolchain for clang 5.0
c71a565 is described below
commit c71a56527726f3cab58b37ad61a2129662f8a4fb
Author: Wes McKinney <[email protected]>
AuthorDate: Fri Feb 16 15:53:41 2018 -0500
ARROW-2117: [C++] Update codebase / CI toolchain for clang 5.0
LLVM 5.0 found a number of code tidying issues that I fixed here. It also
wants us to use `override` with our virtual destructor implementations, though I
wasn't sure whether that is a best practice or not.
cc @xhochy @cpcloud for review
Author: Wes McKinney <[email protected]>
Closes #1597 from wesm/ARROW-2117 and squashes the following commits:
28f384c0 [Wes McKinney] Use clang-5 in integration test builds also
f76c2880 [Wes McKinney] Mark rest of virtual dtors as override
03d03cd6 [Wes McKinney] Upgrade clang and clang-format use to LLVM 5.0,
account for new compiler warnings
---
.travis.yml | 10 ++++-----
ci/travis_install_clang_tools.sh | 4 ++--
cpp/CMakeLists.txt | 2 +-
cpp/cmake_modules/SetupCxxFlags.cmake | 6 ++++-
cpp/cmake_modules/ThirdpartyToolchain.cmake | 11 ++++++++-
cpp/src/arrow/array-test.cc | 15 ++++++-------
cpp/src/arrow/buffer.h | 2 +-
cpp/src/arrow/builder.cc | 9 ++++----
cpp/src/arrow/builder.h | 4 ++--
cpp/src/arrow/compare.cc | 5 ++---
cpp/src/arrow/compute/compute-test.cc | 16 ++++++-------
cpp/src/arrow/compute/kernels/cast.cc | 22 ++++++++++--------
cpp/src/arrow/compute/kernels/hash.cc | 2 +-
cpp/src/arrow/io/file.h | 6 ++---
cpp/src/arrow/io/hdfs.h | 6 ++---
cpp/src/arrow/io/io-file-test.cc | 4 ++--
cpp/src/arrow/io/io-hdfs-test.cc | 2 +-
cpp/src/arrow/ipc/feather.cc | 7 +++---
cpp/src/arrow/ipc/reader.h | 2 +-
cpp/src/arrow/ipc/writer.cc | 8 +++----
cpp/src/arrow/ipc/writer.h | 6 ++---
cpp/src/arrow/memory_pool.cc | 2 +-
cpp/src/arrow/memory_pool.h | 2 +-
cpp/src/arrow/python/CMakeLists.txt | 7 ++++++
cpp/src/arrow/python/io.h | 4 ++--
cpp/src/arrow/python/numpy_to_arrow.cc | 10 ++++-----
cpp/src/arrow/table.h | 2 +-
cpp/src/arrow/test-common.h | 10 +--------
cpp/src/arrow/type.cc | 4 ++--
cpp/src/arrow/util/bit-util.h | 35 ++++++++++++-----------------
cpp/src/arrow/util/compression_zlib.h | 2 +-
cpp/src/arrow/util/io-util.h | 4 ++--
cpp/src/plasma/plasma.cc | 5 +++--
cpp/src/plasma/plasma.h | 5 +++--
34 files changed, 126 insertions(+), 115 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index d591a99..9ef16b5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -46,7 +46,7 @@ matrix:
allow_failures:
- jdk: oraclejdk9
include:
- # C++ & Python w/ clang 4.0
+ # C++ & Python w/ clang 5.0
- compiler: gcc
language: cpp
os: linux
@@ -59,8 +59,8 @@ matrix:
- export ARROW_TRAVIS_ORC=1
- export ARROW_TRAVIS_CLANG_FORMAT=1
- export ARROW_BUILD_WARNING_LEVEL=CHECKIN
- - export CC="clang-4.0"
- - export CXX="clang++-4.0"
+ - export CC="clang-5.0"
+ - export CXX="clang++-5.0"
- $TRAVIS_BUILD_DIR/ci/travis_install_clang_tools.sh
- $TRAVIS_BUILD_DIR/ci/travis_lint.sh
- if [ $ARROW_CI_CPP_AFFECTED == "1" ]; then
$TRAVIS_BUILD_DIR/ci/travis_before_script_cpp.sh; fi
@@ -124,8 +124,8 @@ matrix:
before_script:
- eval `python $TRAVIS_BUILD_DIR/ci/travis_detect_changes.py`
- source $TRAVIS_BUILD_DIR/ci/travis_install_clang_tools.sh
- - export CC="clang-4.0"
- - export CXX="clang++-4.0"
+ - export CC="clang-5.0"
+ - export CXX="clang++-5.0"
- nvm install node
- if [ $ARROW_CI_INTEGRATION_AFFECTED == "1" ]; then
$TRAVIS_BUILD_DIR/ci/travis_before_script_js.sh; fi
- if [ $ARROW_CI_INTEGRATION_AFFECTED == "1" ]; then
$TRAVIS_BUILD_DIR/ci/travis_before_script_cpp.sh; fi
diff --git a/ci/travis_install_clang_tools.sh b/ci/travis_install_clang_tools.sh
index d0108ad..630c09e 100755
--- a/ci/travis_install_clang_tools.sh
+++ b/ci/travis_install_clang_tools.sh
@@ -19,6 +19,6 @@
wget -O - http://llvm.org/apt/llvm-snapshot.gpg.key|sudo apt-key add -
sudo apt-add-repository -y \
- "deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-4.0 main"
+ "deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-5.0 main"
sudo apt-get update -q
-sudo apt-get install -q clang-4.0 clang-format-4.0 clang-tidy-4.0
+sudo apt-get install -q clang-5.0 clang-format-5.0 clang-tidy-5.0
diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt
index 62c8e65..42c1ec8 100644
--- a/cpp/CMakeLists.txt
+++ b/cpp/CMakeLists.txt
@@ -48,7 +48,7 @@ set(ARROW_ABI_VERSION "${ARROW_SO_VERSION}.0.0")
set(BUILD_SUPPORT_DIR "${CMAKE_SOURCE_DIR}/build-support")
-set(CLANG_FORMAT_VERSION "4.0")
+set(CLANG_FORMAT_VERSION "5.0")
find_package(ClangTools)
if ("$ENV{CMAKE_EXPORT_COMPILE_COMMANDS}" STREQUAL "1" OR CLANG_TIDY_FOUND)
# Generate a Clang compile_commands.json "compilation database" file for use
diff --git a/cpp/cmake_modules/SetupCxxFlags.cmake
b/cpp/cmake_modules/SetupCxxFlags.cmake
index d901bde..e21c549 100644
--- a/cpp/cmake_modules/SetupCxxFlags.cmake
+++ b/cpp/cmake_modules/SetupCxxFlags.cmake
@@ -91,7 +91,7 @@ if ("${UPPERCASE_BUILD_WARNING_LEVEL}" STREQUAL "CHECKIN")
elseif ("${COMPILER_FAMILY}" STREQUAL "clang")
set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Weverything -Wno-c++98-compat \
-Wno-c++98-compat-pedantic -Wno-deprecated -Wno-weak-vtables -Wno-padded \
--Wno-comma -Wno-unused-parameter -Wno-undef \
+-Wno-comma -Wno-unused-parameter -Wno-unused-template -Wno-undef \
-Wno-shadow -Wno-switch-enum -Wno-exit-time-destructors \
-Wno-global-constructors -Wno-weak-template-vtables
-Wno-undefined-reinterpret-cast \
-Wno-implicit-fallthrough -Wno-unreachable-code-return \
@@ -117,6 +117,10 @@ if ("${UPPERCASE_BUILD_WARNING_LEVEL}" STREQUAL "CHECKIN")
set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-undefined-func-template")
endif()
+ if ("${COMPILER_VERSION}" VERSION_GREATER "4.0")
+ set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS}
-Wno-zero-as-null-pointer-constant")
+ endif()
+
# Treat all compiler warnings as errors
set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unknown-warning-option
-Werror")
elseif ("${COMPILER_FAMILY}" STREQUAL "gcc")
diff --git a/cpp/cmake_modules/ThirdpartyToolchain.cmake
b/cpp/cmake_modules/ThirdpartyToolchain.cmake
index 193b162..3511d40 100644
--- a/cpp/cmake_modules/ThirdpartyToolchain.cmake
+++ b/cpp/cmake_modules/ThirdpartyToolchain.cmake
@@ -909,12 +909,21 @@ if (ARROW_ORC)
set(ORC_INCLUDE_DIR "${ORC_PREFIX}/include")
set(ORC_STATIC_LIB
"${ORC_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}orc${CMAKE_STATIC_LIBRARY_SUFFIX}")
+ if ("${COMPILER_FAMILY}" STREQUAL "clang")
+ if ("${COMPILER_VERSION}" VERSION_GREATER "4.0")
+ set(ORC_CMAKE_CXX_FLAGS " -Wno-zero-as-null-pointer-constant \
+-Wno-inconsistent-missing-destructor-override ")
+ endif()
+ endif()
+
+ set(ORC_CMAKE_CXX_FLAGS "${EP_CXX_FLAGS} ${ORC_CMAKE_CXX_FLAGS}")
+
# Since LZ4 isn't installed, the header file is in ${LZ4_HOME}/lib instead of
# ${LZ4_HOME}/include, which forces us to specify the include directory
# manually as well.
set (ORC_CMAKE_ARGS -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX=${ORC_PREFIX}
- -DCMAKE_CXX_FLAGS=${EP_CXX_FLAGS}
+ -DCMAKE_CXX_FLAGS=${ORC_CMAKE_CXX_FLAGS}
-DBUILD_LIBHDFSPP=OFF
-DBUILD_JAVA=OFF
-DBUILD_TOOLS=OFF
diff --git a/cpp/src/arrow/array-test.cc b/cpp/src/arrow/array-test.cc
index fa64d46..d68b04d 100644
--- a/cpp/src/arrow/array-test.cc
+++ b/cpp/src/arrow/array-test.cc
@@ -193,11 +193,13 @@ TEST_F(TestArray, TestCopy) {}
// Primitive type tests
TEST_F(TestBuilder, TestReserve) {
- ASSERT_OK(builder_->Init(10));
- ASSERT_EQ(2, builder_->null_bitmap()->size());
+ UInt8Builder builder(pool_);
- ASSERT_OK(builder_->Reserve(30));
- ASSERT_EQ(4, builder_->null_bitmap()->size());
+ ASSERT_OK(builder.Init(10));
+ ASSERT_EQ(2, builder.null_bitmap()->size());
+
+ ASSERT_OK(builder.Reserve(30));
+ ASSERT_EQ(4, builder.null_bitmap()->size());
}
template <typename Attrs>
@@ -266,7 +268,6 @@ class TestPrimitiveBuilder : public TestBuilder {
int64_t FlipValue(int64_t value) const { return ~value; }
protected:
- std::shared_ptr<DataType> type_;
std::unique_ptr<BuilderType> builder_;
std::unique_ptr<BuilderType> builder_nn_;
@@ -1287,7 +1288,7 @@ TEST_F(TestFWBinaryArray, Builder) {
std::shared_ptr<Array> result;
- auto CheckResult = [&length, &is_valid, &raw_data, byte_width](const Array&
result) {
+ auto CheckResult = [&length, &is_valid, &raw_data](const Array& result) {
// Verify output
const auto& fw_result = static_cast<const FixedSizeBinaryArray&>(result);
@@ -2029,7 +2030,6 @@ class TestListArray : public TestBuilder {
protected:
std::shared_ptr<DataType> value_type_;
- std::shared_ptr<DataType> type_;
std::shared_ptr<ListBuilder> builder_;
std::shared_ptr<ListArray> result_;
@@ -2486,7 +2486,6 @@ class TestStructBuilder : public TestBuilder {
protected:
vector<std::shared_ptr<Field>> value_fields_;
- std::shared_ptr<DataType> type_;
std::shared_ptr<StructBuilder> builder_;
std::shared_ptr<StructArray> result_;
diff --git a/cpp/src/arrow/buffer.h b/cpp/src/arrow/buffer.h
index d12eeb4..74a3c68 100644
--- a/cpp/src/arrow/buffer.h
+++ b/cpp/src/arrow/buffer.h
@@ -198,7 +198,7 @@ class ARROW_EXPORT ResizableBuffer : public MutableBuffer {
class ARROW_EXPORT PoolBuffer : public ResizableBuffer {
public:
explicit PoolBuffer(MemoryPool* pool = NULLPTR);
- virtual ~PoolBuffer();
+ ~PoolBuffer() override;
Status Resize(const int64_t new_size, bool shrink_to_fit = true) override;
Status Reserve(const int64_t new_capacity) override;
diff --git a/cpp/src/arrow/builder.cc b/cpp/src/arrow/builder.cc
index a740299..6f9749d 100644
--- a/cpp/src/arrow/builder.cc
+++ b/cpp/src/arrow/builder.cc
@@ -1131,8 +1131,9 @@ Status
Decimal128Builder::FinishInternal(std::shared_ptr<ArrayData>* out) {
ListBuilder::ListBuilder(MemoryPool* pool, std::unique_ptr<ArrayBuilder>
value_builder,
const std::shared_ptr<DataType>& type)
- : ArrayBuilder(type ? type : std::static_pointer_cast<DataType>(
-
std::make_shared<ListType>(value_builder->type())),
+ : ArrayBuilder(type ? type
+ : std::static_pointer_cast<DataType>(
+
std::make_shared<ListType>(value_builder->type())),
pool),
offsets_builder_(pool),
value_builder_(std::move(value_builder)) {}
@@ -1373,8 +1374,8 @@ Status
StructBuilder::FinishInternal(std::shared_ptr<ArrayData>* out) {
return Status::OK();
}
-// ----------------------------------------------------------------------
-// Helper functions
+ // ----------------------------------------------------------------------
+ // Helper functions
#define BUILDER_CASE(ENUM, BuilderType) \
case Type::ENUM: \
diff --git a/cpp/src/arrow/builder.h b/cpp/src/arrow/builder.h
index d1611f6..9826a6c 100644
--- a/cpp/src/arrow/builder.h
+++ b/cpp/src/arrow/builder.h
@@ -857,7 +857,7 @@ class ARROW_EXPORT DictionaryBuilder : public ArrayBuilder {
public:
using Scalar = typename internal::DictionaryScalar<T>::type;
- ~DictionaryBuilder() {}
+ ~DictionaryBuilder() override {}
DictionaryBuilder(const std::shared_ptr<DataType>& type, MemoryPool* pool);
@@ -907,7 +907,7 @@ class ARROW_EXPORT DictionaryBuilder : public ArrayBuilder {
template <>
class ARROW_EXPORT DictionaryBuilder<NullType> : public ArrayBuilder {
public:
- ~DictionaryBuilder();
+ ~DictionaryBuilder() override;
DictionaryBuilder(const std::shared_ptr<DataType>& type, MemoryPool* pool);
explicit DictionaryBuilder(MemoryPool* pool);
diff --git a/cpp/src/arrow/compare.cc b/cpp/src/arrow/compare.cc
index 9f07fa7..9ed54ca 100644
--- a/cpp/src/arrow/compare.cc
+++ b/cpp/src/arrow/compare.cc
@@ -359,9 +359,8 @@ class ArrayEqualsVisitor : public RangeEqualsVisitor {
const uint8_t* right_data = right.values()->data();
for (int64_t i = 0; i < left.length(); ++i) {
- if (left.IsValid(i) &&
- BitUtil::GetBit(left_data, i + left.offset()) !=
- BitUtil::GetBit(right_data, i + right.offset())) {
+ if (left.IsValid(i) && BitUtil::GetBit(left_data, i + left.offset()) !=
+ BitUtil::GetBit(right_data, i +
right.offset())) {
result_ = false;
return Status::OK();
}
diff --git a/cpp/src/arrow/compute/compute-test.cc
b/cpp/src/arrow/compute/compute-test.cc
index 3fc1501..0d27b9d 100644
--- a/cpp/src/arrow/compute/compute-test.cc
+++ b/cpp/src/arrow/compute/compute-test.cc
@@ -291,14 +291,14 @@ TEST_F(TestCast, ToIntDowncastUnsafe) {
TEST_F(TestCast, TimestampToTimestamp) {
CastOptions options;
- auto CheckTimestampCast = [this](
- const CastOptions& options, TimeUnit::type from_unit, TimeUnit::type
to_unit,
- const vector<int64_t>& from_values, const vector<int64_t>& to_values,
- const vector<bool>& is_valid) {
- CheckCase<TimestampType, int64_t, TimestampType, int64_t>(
- timestamp(from_unit), from_values, is_valid, timestamp(to_unit),
to_values,
- options);
- };
+ auto CheckTimestampCast =
+ [this](const CastOptions& options, TimeUnit::type from_unit,
TimeUnit::type to_unit,
+ const vector<int64_t>& from_values, const vector<int64_t>&
to_values,
+ const vector<bool>& is_valid) {
+ CheckCase<TimestampType, int64_t, TimestampType, int64_t>(
+ timestamp(from_unit), from_values, is_valid, timestamp(to_unit),
to_values,
+ options);
+ };
vector<bool> is_valid = {true, false, true, true, true};
diff --git a/cpp/src/arrow/compute/kernels/cast.cc
b/cpp/src/arrow/compute/kernels/cast.cc
index afa0548..eaebd7c 100644
--- a/cpp/src/arrow/compute/kernels/cast.cc
+++ b/cpp/src/arrow/compute/kernels/cast.cc
@@ -83,8 +83,9 @@ struct is_zero_copy_cast {
template <typename O, typename I>
struct is_zero_copy_cast<
- O, I, typename std::enable_if<std::is_same<I, O>::value &&
- !std::is_base_of<ParametricType,
O>::value>::type> {
+ O, I,
+ typename std::enable_if<std::is_same<I, O>::value &&
+ !std::is_base_of<ParametricType, O>::value>::type>
{
static constexpr bool value = true;
};
@@ -121,8 +122,9 @@ struct CastFunctor<O, I, typename
std::enable_if<is_zero_copy_cast<O, I>::value>
// Null to other things
template <typename T>
-struct CastFunctor<T, NullType, typename std::enable_if<
- std::is_base_of<FixedWidthType,
T>::value>::type> {
+struct CastFunctor<
+ T, NullType,
+ typename std::enable_if<std::is_base_of<FixedWidthType, T>::value>::type> {
void operator()(FunctionContext* ctx, const CastOptions& options,
const ArrayData& input, ArrayData* output) {}
};
@@ -172,8 +174,9 @@ struct is_integer_downcast {
template <typename O, typename I>
struct is_integer_downcast<
- O, I, typename std::enable_if<std::is_base_of<Integer, O>::value &&
- std::is_base_of<Integer, I>::value>::type> {
+ O, I,
+ typename std::enable_if<std::is_base_of<Integer, O>::value &&
+ std::is_base_of<Integer, I>::value>::type> {
using O_T = typename O::c_type;
using I_T = typename I::c_type;
@@ -189,9 +192,10 @@ struct is_integer_downcast<
};
template <typename O, typename I>
-struct CastFunctor<O, I, typename std::enable_if<std::is_same<BooleanType,
O>::value &&
- std::is_base_of<Number,
I>::value &&
- !std::is_same<O,
I>::value>::type> {
+struct CastFunctor<O, I,
+ typename std::enable_if<std::is_same<BooleanType, O>::value
&&
+ std::is_base_of<Number, I>::value &&
+ !std::is_same<O, I>::value>::type> {
void operator()(FunctionContext* ctx, const CastOptions& options,
const ArrayData& input, ArrayData* output) {
auto in_data = GetValues<typename I::c_type>(input, 1);
diff --git a/cpp/src/arrow/compute/kernels/hash.cc
b/cpp/src/arrow/compute/kernels/hash.cc
index acbf403..da9797f 100644
--- a/cpp/src/arrow/compute/kernels/hash.cc
+++ b/cpp/src/arrow/compute/kernels/hash.cc
@@ -56,7 +56,7 @@ class HashException : public std::exception {
explicit HashException(const std::string& msg, StatusCode code =
StatusCode::Invalid)
: msg_(msg), code_(code) {}
- ~HashException() throw() {}
+ ~HashException() throw() override {}
const char* what() const throw() override;
diff --git a/cpp/src/arrow/io/file.h b/cpp/src/arrow/io/file.h
index 265df4d..3d65834 100644
--- a/cpp/src/arrow/io/file.h
+++ b/cpp/src/arrow/io/file.h
@@ -37,7 +37,7 @@ namespace io {
class ARROW_EXPORT FileOutputStream : public OutputStream {
public:
- ~FileOutputStream();
+ ~FileOutputStream() override;
/// \brief Open a local file for writing, truncating any existing file
/// \param[in] path with UTF8 encoding
@@ -73,7 +73,7 @@ class ARROW_EXPORT FileOutputStream : public OutputStream {
// Operating system file
class ARROW_EXPORT ReadableFile : public RandomAccessFile {
public:
- ~ReadableFile();
+ ~ReadableFile() override;
/// \brief Open a local file for reading
/// \param[in] path with UTF8 encoding
@@ -125,7 +125,7 @@ class ARROW_EXPORT ReadableFile : public RandomAccessFile {
// FileOutputStream
class ARROW_EXPORT MemoryMappedFile : public ReadWriteFileInterface {
public:
- ~MemoryMappedFile();
+ ~MemoryMappedFile() override;
/// Create new file with indicated size, return in read/write mode
static Status Create(const std::string& path, int64_t size,
diff --git a/cpp/src/arrow/io/hdfs.h b/cpp/src/arrow/io/hdfs.h
index 062473b..a52ec0b 100644
--- a/cpp/src/arrow/io/hdfs.h
+++ b/cpp/src/arrow/io/hdfs.h
@@ -68,7 +68,7 @@ struct HdfsConnectionConfig {
class ARROW_EXPORT HadoopFileSystem : public FileSystem {
public:
- ~HadoopFileSystem();
+ ~HadoopFileSystem() override;
// Connect to an HDFS cluster given a configuration
//
@@ -174,7 +174,7 @@ class ARROW_EXPORT HadoopFileSystem : public FileSystem {
class ARROW_EXPORT HdfsReadableFile : public RandomAccessFile {
public:
- ~HdfsReadableFile();
+ ~HdfsReadableFile() override;
Status Close() override;
@@ -213,7 +213,7 @@ class ARROW_EXPORT HdfsReadableFile : public
RandomAccessFile {
// WriteableFile interface)
class ARROW_EXPORT HdfsOutputStream : public OutputStream {
public:
- ~HdfsOutputStream();
+ ~HdfsOutputStream() override;
Status Close() override;
diff --git a/cpp/src/arrow/io/io-file-test.cc b/cpp/src/arrow/io/io-file-test.cc
index e70431e..a492016 100644
--- a/cpp/src/arrow/io/io-file-test.cc
+++ b/cpp/src/arrow/io/io-file-test.cc
@@ -395,7 +395,7 @@ TEST_F(TestReadableFile, ThreadSafety) {
std::atomic<int> correct_count(0);
constexpr int niter = 10000;
- auto ReadData = [&correct_count, &data, this, niter]() {
+ auto ReadData = [&correct_count, &data, this]() {
std::shared_ptr<Buffer> buffer;
for (int i = 0; i < niter; ++i) {
@@ -588,7 +588,7 @@ TEST_F(TestMemoryMappedFile, ThreadSafety) {
std::atomic<int> correct_count(0);
constexpr int niter = 10000;
- auto ReadData = [&correct_count, &data, &file, niter]() {
+ auto ReadData = [&correct_count, &data, &file]() {
std::shared_ptr<Buffer> buffer;
for (int i = 0; i < niter; ++i) {
diff --git a/cpp/src/arrow/io/io-hdfs-test.cc b/cpp/src/arrow/io/io-hdfs-test.cc
index f2ded6f..380fb34 100644
--- a/cpp/src/arrow/io/io-hdfs-test.cc
+++ b/cpp/src/arrow/io/io-hdfs-test.cc
@@ -454,7 +454,7 @@ TYPED_TEST(TestHadoopFileSystem, ThreadSafety) {
std::atomic<int> correct_count(0);
constexpr int niter = 1000;
- auto ReadData = [&file, &correct_count, &data, niter]() {
+ auto ReadData = [&file, &correct_count, &data]() {
for (int i = 0; i < niter; ++i) {
std::shared_ptr<Buffer> buffer;
if (i % 2 == 0) {
diff --git a/cpp/src/arrow/ipc/feather.cc b/cpp/src/arrow/ipc/feather.cc
index f440c19..faf6a08 100644
--- a/cpp/src/arrow/ipc/feather.cc
+++ b/cpp/src/arrow/ipc/feather.cc
@@ -579,9 +579,10 @@ class TableWriter::TableWriterImpl : public ArrayVisitor {
values_bytes = bin_values.raw_value_offsets()[values.length()];
// Write the variable-length offsets
- RETURN_NOT_OK(WritePadded(stream_.get(), reinterpret_cast<const
uint8_t*>(
-
bin_values.raw_value_offsets()),
- offset_bytes, &bytes_written));
+ RETURN_NOT_OK(
+ WritePadded(stream_.get(),
+ reinterpret_cast<const
uint8_t*>(bin_values.raw_value_offsets()),
+ offset_bytes, &bytes_written));
} else {
RETURN_NOT_OK(WritePaddedBlank(stream_.get(), offset_bytes,
&bytes_written));
}
diff --git a/cpp/src/arrow/ipc/reader.h b/cpp/src/arrow/ipc/reader.h
index 019c9bc..1763527 100644
--- a/cpp/src/arrow/ipc/reader.h
+++ b/cpp/src/arrow/ipc/reader.h
@@ -53,7 +53,7 @@ using RecordBatchReader = ::arrow::RecordBatchReader;
/// reads see the ReadRecordBatch functions
class ARROW_EXPORT RecordBatchStreamReader : public RecordBatchReader {
public:
- virtual ~RecordBatchStreamReader();
+ ~RecordBatchStreamReader() override;
/// Create batch reader from generic MessageReader
///
diff --git a/cpp/src/arrow/ipc/writer.cc b/cpp/src/arrow/ipc/writer.cc
index c6aa770..3debd76 100644
--- a/cpp/src/arrow/ipc/writer.cc
+++ b/cpp/src/arrow/ipc/writer.cc
@@ -106,7 +106,7 @@ class RecordBatchSerializer : public ArrayVisitor {
DCHECK_GT(max_recursion_depth, 0);
}
- virtual ~RecordBatchSerializer() = default;
+ ~RecordBatchSerializer() override = default;
Status VisitArray(const Array& arr) {
if (max_recursion_depth_ <= 0) {
@@ -956,17 +956,17 @@ Status RecordBatchFileWriter::Open(io::OutputStream* sink,
std::shared_ptr<RecordBatchWriter>* out) {
// ctor is private
auto result = std::shared_ptr<RecordBatchFileWriter>(new
RecordBatchFileWriter());
- result->impl_.reset(new RecordBatchFileWriterImpl(sink, schema));
+ result->file_impl_.reset(new RecordBatchFileWriterImpl(sink, schema));
*out = result;
return Status::OK();
}
Status RecordBatchFileWriter::WriteRecordBatch(const RecordBatch& batch,
bool allow_64bit) {
- return impl_->WriteRecordBatch(batch, allow_64bit);
+ return file_impl_->WriteRecordBatch(batch, allow_64bit);
}
-Status RecordBatchFileWriter::Close() { return impl_->Close(); }
+Status RecordBatchFileWriter::Close() { return file_impl_->Close(); }
// ----------------------------------------------------------------------
// Serialization public APIs
diff --git a/cpp/src/arrow/ipc/writer.h b/cpp/src/arrow/ipc/writer.h
index 013783e..6dbf29d 100644
--- a/cpp/src/arrow/ipc/writer.h
+++ b/cpp/src/arrow/ipc/writer.h
@@ -88,7 +88,7 @@ class ARROW_EXPORT RecordBatchWriter {
/// format
class ARROW_EXPORT RecordBatchStreamWriter : public RecordBatchWriter {
public:
- virtual ~RecordBatchStreamWriter();
+ ~RecordBatchStreamWriter() override;
/// Create a new writer from stream sink and schema. User is responsible for
/// closing the actual OutputStream.
@@ -126,7 +126,7 @@ class ARROW_EXPORT RecordBatchStreamWriter : public
RecordBatchWriter {
/// numbers are written at the start and end of the file
class ARROW_EXPORT RecordBatchFileWriter : public RecordBatchStreamWriter {
public:
- virtual ~RecordBatchFileWriter();
+ ~RecordBatchFileWriter() override;
/// Create a new writer from stream sink and schema
///
@@ -151,7 +151,7 @@ class ARROW_EXPORT RecordBatchFileWriter : public
RecordBatchStreamWriter {
private:
RecordBatchFileWriter();
class ARROW_NO_EXPORT RecordBatchFileWriterImpl;
- std::unique_ptr<RecordBatchFileWriterImpl> impl_;
+ std::unique_ptr<RecordBatchFileWriterImpl> file_impl_;
};
/// \brief Low-level API for writing a record batch (without schema) to an
OutputStream
diff --git a/cpp/src/arrow/memory_pool.cc b/cpp/src/arrow/memory_pool.cc
index 3496636..9953043 100644
--- a/cpp/src/arrow/memory_pool.cc
+++ b/cpp/src/arrow/memory_pool.cc
@@ -89,7 +89,7 @@ class DefaultMemoryPool : public MemoryPool {
public:
DefaultMemoryPool() : bytes_allocated_(0) { max_memory_ = 0; }
- ~DefaultMemoryPool() {}
+ ~DefaultMemoryPool() override {}
Status Allocate(int64_t size, uint8_t** out) override {
RETURN_NOT_OK(AllocateAligned(size, out));
diff --git a/cpp/src/arrow/memory_pool.h b/cpp/src/arrow/memory_pool.h
index 52ec67f..348343b 100644
--- a/cpp/src/arrow/memory_pool.h
+++ b/cpp/src/arrow/memory_pool.h
@@ -71,7 +71,7 @@ class ARROW_EXPORT MemoryPool {
class ARROW_EXPORT LoggingMemoryPool : public MemoryPool {
public:
explicit LoggingMemoryPool(MemoryPool* pool);
- virtual ~LoggingMemoryPool() = default;
+ ~LoggingMemoryPool() override = default;
Status Allocate(int64_t size, uint8_t** out) override;
Status Reallocate(int64_t old_size, int64_t new_size, uint8_t** ptr)
override;
diff --git a/cpp/src/arrow/python/CMakeLists.txt
b/cpp/src/arrow/python/CMakeLists.txt
index aa12baa..f931abe 100644
--- a/cpp/src/arrow/python/CMakeLists.txt
+++ b/cpp/src/arrow/python/CMakeLists.txt
@@ -62,6 +62,13 @@ set(ARROW_PYTHON_SRCS
pyarrow.cc
)
+if ("${COMPILER_FAMILY}" STREQUAL "clang")
+ set_property(SOURCE pyarrow.cc
+ APPEND_STRING
+ PROPERTY COMPILE_FLAGS
+ " -Wno-cast-qual ")
+endif()
+
set(ARROW_PYTHON_SHARED_LINK_LIBS
arrow_shared
)
diff --git a/cpp/src/arrow/python/io.h b/cpp/src/arrow/python/io.h
index 648f6de..6960556 100644
--- a/cpp/src/arrow/python/io.h
+++ b/cpp/src/arrow/python/io.h
@@ -39,7 +39,7 @@ class ARROW_NO_EXPORT PythonFile;
class ARROW_EXPORT PyReadableFile : public io::RandomAccessFile {
public:
explicit PyReadableFile(PyObject* file);
- virtual ~PyReadableFile();
+ ~PyReadableFile() override;
Status Close() override;
@@ -68,7 +68,7 @@ class ARROW_EXPORT PyReadableFile : public
io::RandomAccessFile {
class ARROW_EXPORT PyOutputStream : public io::OutputStream {
public:
explicit PyOutputStream(PyObject* file);
- virtual ~PyOutputStream();
+ ~PyOutputStream() override;
Status Close() override;
Status Tell(int64_t* position) const override;
diff --git a/cpp/src/arrow/python/numpy_to_arrow.cc
b/cpp/src/arrow/python/numpy_to_arrow.cc
index 3dd5a79..522bf51 100644
--- a/cpp/src/arrow/python/numpy_to_arrow.cc
+++ b/cpp/src/arrow/python/numpy_to_arrow.cc
@@ -67,7 +67,7 @@ constexpr int64_t kBinaryMemoryLimit =
std::numeric_limits<int32_t>::max();
namespace {
-inline bool PyFloat_isnan(const PyObject* obj) {
+inline bool PyFloat_isnan(PyObject* obj) {
if (PyFloat_Check(obj)) {
double val = PyFloat_AS_DOUBLE(obj);
return val != val;
@@ -76,11 +76,11 @@ inline bool PyFloat_isnan(const PyObject* obj) {
}
}
-inline bool PandasObjectIsNull(const PyObject* obj) {
+inline bool PandasObjectIsNull(PyObject* obj) {
return obj == Py_None || obj == numpy_nan || PyFloat_isnan(obj);
}
-inline bool PyObject_is_string(const PyObject* obj) {
+inline bool PyObject_is_string(PyObject* obj) {
#if PY_MAJOR_VERSION >= 3
return PyUnicode_Check(obj) || PyBytes_Check(obj);
#else
@@ -88,9 +88,9 @@ inline bool PyObject_is_string(const PyObject* obj) {
#endif
}
-inline bool PyObject_is_float(const PyObject* obj) { return
PyFloat_Check(obj); }
+inline bool PyObject_is_float(PyObject* obj) { return PyFloat_Check(obj); }
-inline bool PyObject_is_integer(const PyObject* obj) {
+inline bool PyObject_is_integer(PyObject* obj) {
return (!PyBool_Check(obj)) && PyArray_IsIntegerScalar(obj);
}
diff --git a/cpp/src/arrow/table.h b/cpp/src/arrow/table.h
index 570a650..6938db1 100644
--- a/cpp/src/arrow/table.h
+++ b/cpp/src/arrow/table.h
@@ -222,7 +222,7 @@ class ARROW_EXPORT Table {
/// \brief Compute a sequence of record batches from a (possibly chunked) Table
class ARROW_EXPORT TableBatchReader : public RecordBatchReader {
public:
- ~TableBatchReader();
+ ~TableBatchReader() override;
/// \brief Read batches with the maximum possible size
explicit TableBatchReader(const Table& table);
diff --git a/cpp/src/arrow/test-common.h b/cpp/src/arrow/test-common.h
index 911adf7..4f7a268 100644
--- a/cpp/src/arrow/test-common.h
+++ b/cpp/src/arrow/test-common.h
@@ -119,19 +119,11 @@ std::shared_ptr<Array>
TestBase::MakeRandomArray<BinaryArray>(int64_t length,
class TestBuilder : public ::testing::Test {
public:
- void SetUp() {
- pool_ = default_memory_pool();
- type_ = uint8();
- builder_.reset(new UInt8Builder(pool_));
- builder_nn_.reset(new UInt8Builder(pool_));
- }
+ void SetUp() { pool_ = default_memory_pool(); }
protected:
MemoryPool* pool_;
-
std::shared_ptr<DataType> type_;
- std::unique_ptr<ArrayBuilder> builder_;
- std::unique_ptr<ArrayBuilder> builder_nn_;
};
} // namespace arrow
diff --git a/cpp/src/arrow/type.cc b/cpp/src/arrow/type.cc
index 836a2aa..6574cce 100644
--- a/cpp/src/arrow/type.cc
+++ b/cpp/src/arrow/type.cc
@@ -364,8 +364,8 @@ std::shared_ptr<Schema>
schema(std::vector<std::shared_ptr<Field>>&& fields,
return std::make_shared<Schema>(std::move(fields), metadata);
}
-// ----------------------------------------------------------------------
-// Visitors and factory functions
+ // ----------------------------------------------------------------------
+ // Visitors and factory functions
#define ACCEPT_VISITOR(TYPE) \
Status TYPE::Accept(TypeVisitor* visitor) const { return
visitor->Visit(*this); }
diff --git a/cpp/src/arrow/util/bit-util.h b/cpp/src/arrow/util/bit-util.h
index 86c17d1..aed3a01 100644
--- a/cpp/src/arrow/util/bit-util.h
+++ b/cpp/src/arrow/util/bit-util.h
@@ -360,23 +360,20 @@ static inline void ByteSwap(void* dst, const void* src,
int len) {
/// Converts to big endian format (if not already in big endian) from the
/// machine's native endian format.
#if ARROW_LITTLE_ENDIAN
-template <typename T,
- typename =
- EnableIfIsOneOf<T, int64_t, uint64_t, int32_t, uint32_t,
int16_t, uint16_t>>
+template <typename T, typename = EnableIfIsOneOf<T, int64_t, uint64_t,
int32_t, uint32_t,
+ int16_t, uint16_t>>
static inline T ToBigEndian(T value) {
return ByteSwap(value);
}
-template <typename T,
- typename =
- EnableIfIsOneOf<T, int64_t, uint64_t, int32_t, uint32_t,
int16_t, uint16_t>>
+template <typename T, typename = EnableIfIsOneOf<T, int64_t, uint64_t,
int32_t, uint32_t,
+ int16_t, uint16_t>>
static inline T ToLittleEndian(T value) {
return value;
}
#else
-template <typename T,
- typename =
- EnableIfIsOneOf<T, int64_t, uint64_t, int32_t, uint32_t,
int16_t, uint16_t>>
+template <typename T, typename = EnableIfIsOneOf<T, int64_t, uint64_t,
int32_t, uint32_t,
+ int16_t, uint16_t>>
static inline T ToBigEndian(T value) {
return value;
}
@@ -384,30 +381,26 @@ static inline T ToBigEndian(T value) {
/// Converts from big endian format to the machine's native endian format.
#if ARROW_LITTLE_ENDIAN
-template <typename T,
- typename =
- EnableIfIsOneOf<T, int64_t, uint64_t, int32_t, uint32_t,
int16_t, uint16_t>>
+template <typename T, typename = EnableIfIsOneOf<T, int64_t, uint64_t,
int32_t, uint32_t,
+ int16_t, uint16_t>>
static inline T FromBigEndian(T value) {
return ByteSwap(value);
}
-template <typename T,
- typename =
- EnableIfIsOneOf<T, int64_t, uint64_t, int32_t, uint32_t,
int16_t, uint16_t>>
+template <typename T, typename = EnableIfIsOneOf<T, int64_t, uint64_t,
int32_t, uint32_t,
+ int16_t, uint16_t>>
static inline T FromLittleEndian(T value) {
return value;
}
#else
-template <typename T,
- typename =
- EnableIfIsOneOf<T, int64_t, uint64_t, int32_t, uint32_t,
int16_t, uint16_t>>
+template <typename T, typename = EnableIfIsOneOf<T, int64_t, uint64_t,
int32_t, uint32_t,
+ int16_t, uint16_t>>
static inline T FromBigEndian(T value) {
return value;
}
-template <typename T,
- typename =
- EnableIfIsOneOf<T, int64_t, uint64_t, int32_t, uint32_t,
int16_t, uint16_t>>
+template <typename T, typename = EnableIfIsOneOf<T, int64_t, uint64_t,
int32_t, uint32_t,
+ int16_t, uint16_t>>
static inline T FromLittleEndian(T value) {
return ByteSwap(value);
}
diff --git a/cpp/src/arrow/util/compression_zlib.h
b/cpp/src/arrow/util/compression_zlib.h
index a18af6e..0a7b0ce 100644
--- a/cpp/src/arrow/util/compression_zlib.h
+++ b/cpp/src/arrow/util/compression_zlib.h
@@ -38,7 +38,7 @@ class ARROW_EXPORT GZipCodec : public Codec {
};
explicit GZipCodec(Format format = GZIP);
- virtual ~GZipCodec();
+ ~GZipCodec() override;
Status Decompress(int64_t input_len, const uint8_t* input, int64_t
output_len,
uint8_t* output_buffer) override;
diff --git a/cpp/src/arrow/util/io-util.h b/cpp/src/arrow/util/io-util.h
index d1af6c6..864e1bd 100644
--- a/cpp/src/arrow/util/io-util.h
+++ b/cpp/src/arrow/util/io-util.h
@@ -32,7 +32,7 @@ namespace io {
class StdoutStream : public OutputStream {
public:
StdoutStream() : pos_(0) { set_mode(FileMode::WRITE); }
- virtual ~StdoutStream() {}
+ ~StdoutStream() override {}
Status Close() override { return Status::OK(); }
@@ -55,7 +55,7 @@ class StdoutStream : public OutputStream {
class StdinStream : public InputStream {
public:
StdinStream() : pos_(0) { set_mode(FileMode::READ); }
- virtual ~StdinStream() {}
+ ~StdinStream() override {}
Status Close() override { return Status::OK(); }
diff --git a/cpp/src/plasma/plasma.cc b/cpp/src/plasma/plasma.cc
index 8708281..0a019dd 100644
--- a/cpp/src/plasma/plasma.cc
+++ b/cpp/src/plasma/plasma.cc
@@ -33,8 +33,9 @@ int warn_if_sigpipe(int status, int client_sock) {
if (errno == EPIPE || errno == EBADF || errno == ECONNRESET) {
ARROW_LOG(WARNING) << "Received SIGPIPE, BAD FILE DESCRIPTOR, or
ECONNRESET when "
"sending a message to client on fd "
- << client_sock << ". The client on the other end may "
- "have hung up.";
+ << client_sock
+ << ". The client on the other end may "
+ "have hung up.";
return errno;
}
ARROW_LOG(FATAL) << "Failed to write message to client on fd " <<
client_sock << ".";
diff --git a/cpp/src/plasma/plasma.h b/cpp/src/plasma/plasma.h
index 901601f..cfaa927 100644
--- a/cpp/src/plasma/plasma.h
+++ b/cpp/src/plasma/plasma.h
@@ -55,8 +55,9 @@ namespace plasma {
ARROW_LOG(WARNING) \
<< "Received SIGPIPE, BAD FILE DESCRIPTOR, or ECONNRESET when " \
"sending a message to client on fd " \
- << fd_ << ". " \
- "The client on the other end may have hung up."; \
+ << fd_ \
+ << ". " \
+ "The client on the other end may have hung up."; \
} else { \
return _s; \
} \
--
To stop receiving notification emails like this one, please contact
[email protected].