[
https://issues.apache.org/jira/browse/PARQUET-1226?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16370318#comment-16370318
]
ASF GitHub Bot commented on PARQUET-1226:
-----------------------------------------
wesm closed pull request #442: PARQUET-1226: Fixes for CHECKIN compiler warning
level with clang 5.0
URL: https://github.com/apache/parquet-cpp/pull/442
This is a pull request merged from a forked repository.
Because GitHub hides the original diff of a foreign (fork-based) pull
request once it is merged, the diff is reproduced below for the sake of
provenance:
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c2d4ef4f..bca8478c 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -84,7 +84,7 @@ enable_testing()
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake_modules")
set(BUILD_SUPPORT_DIR "${CMAKE_SOURCE_DIR}/build-support")
-set(CLANG_FORMAT_VERSION "4.0")
+set(CLANG_FORMAT_VERSION "5.0")
find_package(ClangTools)
if ("$ENV{CMAKE_EXPORT_COMPILE_COMMANDS}" STREQUAL "1" OR CLANG_TIDY_FOUND)
# Generate a Clang compile_commands.json "compilation database" file for use
@@ -160,6 +160,11 @@ if ("${CMAKE_SOURCE_DIR}" STREQUAL
"${CMAKE_CURRENT_SOURCE_DIR}")
"Build Parquet with statically linked CRT"
OFF)
endif()
+
+ option(PARQUET_VERBOSE_THIRDPARTY_BUILD
+ "If off, output from ExternalProjects will be logged to files rather than
shown"
+ OFF)
+
endif()
include(BuildUtils)
diff --git a/cmake_modules/SetupCxxFlags.cmake
b/cmake_modules/SetupCxxFlags.cmake
index 1678e8dc..01ed85bf 100644
--- a/cmake_modules/SetupCxxFlags.cmake
+++ b/cmake_modules/SetupCxxFlags.cmake
@@ -109,6 +109,11 @@ if ("${UPPERCASE_BUILD_WARNING_LEVEL}" STREQUAL "CHECKIN")
set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-undefined-func-template")
endif()
+ if ("${COMPILER_VERSION}" VERSION_GREATER "4.0")
+ set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unused-template \
+-Wno-zero-as-null-pointer-constant")
+ endif()
+
# Treat all compiler warnings as errors
set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unknown-warning-option
-Werror")
elseif ("${COMPILER_FAMILY}" STREQUAL "gcc")
diff --git a/cmake_modules/ThirdpartyToolchain.cmake
b/cmake_modules/ThirdpartyToolchain.cmake
index 09e30dfe..9f241121 100644
--- a/cmake_modules/ThirdpartyToolchain.cmake
+++ b/cmake_modules/ThirdpartyToolchain.cmake
@@ -30,6 +30,18 @@ if (NOT MSVC)
set(EP_C_FLAGS "${EP_C_FLAGS} -fPIC")
endif()
+if (NOT PARQUET_VERBOSE_THIRDPARTY_BUILD)
+ set(EP_LOG_OPTIONS
+ LOG_CONFIGURE 1
+ LOG_BUILD 1
+ LOG_INSTALL 1
+ LOG_DOWNLOAD 1)
+ set(Boost_DEBUG FALSE)
+else()
+ set(EP_LOG_OPTIONS)
+ set(Boost_DEBUG TRUE)
+endif()
+
# ----------------------------------------------------------------------
# Configure toolchain with environment variables, if the exist
@@ -52,7 +64,6 @@ endif()
# Boost
# find boost headers and libs
-set(Boost_DEBUG TRUE)
set(Boost_USE_MULTITHREADED ON)
if (MSVC AND PARQUET_USE_STATIC_CRT)
set(Boost_USE_STATIC_RUNTIME ON)
@@ -168,6 +179,7 @@ if (NOT THRIFT_FOUND)
URL "http://zlib.net/fossils/zlib-1.2.8.tar.gz"
BUILD_BYPRODUCTS "${ZLIB_STATIC_LIB}"
${ZLIB_BUILD_BYPRODUCTS}
+ ${EP_LOG_OPTIONS}
CMAKE_ARGS ${ZLIB_CMAKE_ARGS})
set(THRIFT_PREFIX
"${CMAKE_CURRENT_BINARY_DIR}/thrift_ep/src/thrift_ep-install")
@@ -212,7 +224,10 @@ if (NOT THRIFT_FOUND)
URL
https://github.com/lexxmark/winflexbison/releases/download/v.${WINFLEXBISON_VERSION}/win_flex_bison-${WINFLEXBISON_VERSION}.zip
URL_HASH MD5=a2e979ea9928fbf8567e995e9c0df765
SOURCE_DIR ${WINFLEXBISON_PREFIX}
- CONFIGURE_COMMAND "" BUILD_COMMAND "" INSTALL_COMMAND "")
+ CONFIGURE_COMMAND ""
+ BUILD_COMMAND ""
+ INSTALL_COMMAND ""
+ ${EP_LOG_OPTIONS})
set(THRIFT_DEPENDENCIES ${THRIFT_DEPENDENCIES} winflexbison_ep)
set(THRIFT_CMAKE_ARGS
"-DFLEX_EXECUTABLE=${WINFLEXBISON_PREFIX}/win_flex.exe"
@@ -229,7 +244,8 @@ if (NOT THRIFT_FOUND)
URL
"http://archive.apache.org/dist/thrift/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}.tar.gz"
BUILD_BYPRODUCTS "${THRIFT_STATIC_LIB}" "${THRIFT_COMPILER}"
CMAKE_ARGS ${THRIFT_CMAKE_ARGS}
- DEPENDS ${THRIFT_DEPENDENCIES})
+ DEPENDS ${THRIFT_DEPENDENCIES}
+ ${EP_LOG_OPTIONS})
set(THRIFT_VENDORED 1)
else()
@@ -268,7 +284,7 @@ if(PARQUET_BUILD_TESTS AND NOT IGNORE_OPTIONAL_PACKAGES)
set(GTEST_CMAKE_ARGS -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX=${GTEST_PREFIX}
-DCMAKE_CXX_FLAGS=${GTEST_CMAKE_CXX_FLAGS})
-
+
if (MSVC AND NOT PARQUET_USE_STATIC_CRT)
set(GTEST_CMAKE_ARGS ${GTEST_CMAKE_ARGS} -Dgtest_force_shared_crt=ON)
endif()
@@ -276,7 +292,8 @@ if(PARQUET_BUILD_TESTS AND NOT IGNORE_OPTIONAL_PACKAGES)
ExternalProject_Add(googletest_ep
URL
"https://github.com/google/googletest/archive/release-${GTEST_VERSION}.tar.gz"
BUILD_BYPRODUCTS "${GTEST_STATIC_LIB}" "${GTEST_MAIN_STATIC_LIB}"
- CMAKE_ARGS ${GTEST_CMAKE_ARGS})
+ CMAKE_ARGS ${GTEST_CMAKE_ARGS}
+ ${EP_LOG_OPTIONS})
set(GTEST_VENDORED 1)
else()
find_package(GTest REQUIRED)
@@ -328,7 +345,8 @@ if(PARQUET_BUILD_BENCHMARKS AND NOT
IGNORE_OPTIONAL_PACKAGES)
ExternalProject_Add(gbenchmark_ep
URL
"https://github.com/google/benchmark/archive/v${GBENCHMARK_VERSION}.tar.gz"
BUILD_BYPRODUCTS "${GBENCHMARK_STATIC_LIB}"
- CMAKE_ARGS ${GBENCHMARK_CMAKE_ARGS})
+ CMAKE_ARGS ${GBENCHMARK_CMAKE_ARGS}
+ ${EP_LOG_OPTIONS})
set(GBENCHMARK_VENDORED 1)
else()
find_package(GBenchmark REQUIRED)
diff --git a/src/parquet/arrow/arrow-reader-writer-test.cc
b/src/parquet/arrow/arrow-reader-writer-test.cc
index 369eb2e1..72e65d47 100644
--- a/src/parquet/arrow/arrow-reader-writer-test.cc
+++ b/src/parquet/arrow/arrow-reader-writer-test.cc
@@ -56,10 +56,10 @@ using arrow::PrimitiveArray;
using arrow::Status;
using arrow::Table;
using arrow::TimeUnit;
-using arrow::default_memory_pool;
+using arrow::compute::Datum;
using arrow::compute::DictionaryEncode;
using arrow::compute::FunctionContext;
-using arrow::compute::Datum;
+using arrow::default_memory_pool;
using arrow::io::BufferReader;
using arrow::test::randint;
@@ -67,10 +67,10 @@ using arrow::test::random_is_valid;
using ArrowId = ::arrow::Type;
using ParquetType = parquet::Type;
+using parquet::arrow::FromParquetSchema;
using parquet::schema::GroupNode;
using parquet::schema::NodePtr;
using parquet::schema::PrimitiveNode;
-using parquet::arrow::FromParquetSchema;
using ColumnVector = std::vector<std::shared_ptr<arrow::Column>>;
diff --git a/src/parquet/arrow/arrow-schema-test.cc
b/src/parquet/arrow/arrow-schema-test.cc
index 771b9960..d502d243 100644
--- a/src/parquet/arrow/arrow-schema-test.cc
+++ b/src/parquet/arrow/arrow-schema-test.cc
@@ -32,8 +32,8 @@ using arrow::TimeUnit;
using ParquetType = parquet::Type;
using parquet::LogicalType;
using parquet::Repetition;
-using parquet::schema::NodePtr;
using parquet::schema::GroupNode;
+using parquet::schema::NodePtr;
using parquet::schema::PrimitiveNode;
namespace parquet {
@@ -579,9 +579,10 @@ TEST_F(TestConvertParquetSchema,
ParquetRepeatedNestedSchema) {
auto inner_group_type =
std::make_shared<::arrow::StructType>(inner_group_fields);
auto outer_group_fields = {
std::make_shared<Field>("leaf2", INT32, true),
- std::make_shared<Field>("innerGroup",
::arrow::list(std::make_shared<Field>(
- "innerGroup",
inner_group_type, false)),
- false)};
+ std::make_shared<Field>(
+ "innerGroup",
+ ::arrow::list(std::make_shared<Field>("innerGroup",
inner_group_type, false)),
+ false)};
auto outer_group_type =
std::make_shared<::arrow::StructType>(outer_group_fields);
arrow_fields.push_back(std::make_shared<Field>("leaf1", INT32, true));
diff --git a/src/parquet/arrow/reader.cc b/src/parquet/arrow/reader.cc
index 7f81771b..bd68ec32 100644
--- a/src/parquet/arrow/reader.cc
+++ b/src/parquet/arrow/reader.cc
@@ -45,12 +45,12 @@ using arrow::Column;
using arrow::Field;
using arrow::Int32Array;
using arrow::ListArray;
-using arrow::StructArray;
-using arrow::TimestampArray;
using arrow::MemoryPool;
using arrow::PoolBuffer;
using arrow::Status;
+using arrow::StructArray;
using arrow::Table;
+using arrow::TimestampArray;
using parquet::schema::Node;
@@ -218,8 +218,6 @@ class PARQUET_NO_EXPORT PrimitiveImpl : public
ColumnReader::ColumnReaderImpl {
NextRowGroup();
}
- virtual ~PrimitiveImpl() {}
-
Status NextBatch(int64_t records_to_read, std::shared_ptr<Array>* out)
override;
template <typename ParquetType>
@@ -254,8 +252,6 @@ class PARQUET_NO_EXPORT StructImpl : public
ColumnReader::ColumnReaderImpl {
InitField(node, children);
}
- virtual ~StructImpl() {}
-
Status NextBatch(int64_t records_to_read, std::shared_ptr<Array>* out)
override;
Status GetDefLevels(const int16_t** data, size_t* length) override;
Status GetRepLevels(const int16_t** data, size_t* length) override;
@@ -425,8 +421,7 @@ Status FileReader::Impl::ReadRowGroup(int row_group_index,
// TODO(wesm): Refactor to share more code with ReadTable
- auto ReadColumnFunc = [&indices, &row_group_index, &schema, &columns,
&rg_metadata,
- this](int i) {
+ auto ReadColumnFunc = [&indices, &row_group_index, &schema, &columns,
this](int i) {
int column_index = indices[i];
std::shared_ptr<Array> array;
diff --git a/src/parquet/arrow/record_reader.cc
b/src/parquet/arrow/record_reader.cc
index cc968e9c..a3af5ac9 100644
--- a/src/parquet/arrow/record_reader.cc
+++ b/src/parquet/arrow/record_reader.cc
@@ -349,8 +349,6 @@ class TypedRecordReader : public
RecordReader::RecordReaderImpl {
public:
typedef typename DType::c_type T;
- ~TypedRecordReader() {}
-
TypedRecordReader(const ColumnDescriptor* schema, ::arrow::MemoryPool* pool)
: RecordReader::RecordReaderImpl(schema, pool),
current_decoder_(nullptr) {}
diff --git a/src/parquet/arrow/schema.cc b/src/parquet/arrow/schema.cc
index e8bcce0d..48b1181c 100644
--- a/src/parquet/arrow/schema.cc
+++ b/src/parquet/arrow/schema.cc
@@ -34,9 +34,9 @@ using arrow::TypePtr;
using ArrowType = arrow::Type;
using parquet::Repetition;
+using parquet::schema::GroupNode;
using parquet::schema::Node;
using parquet::schema::NodePtr;
-using parquet::schema::GroupNode;
using parquet::schema::PrimitiveNode;
using ParquetType = parquet::Type;
diff --git a/src/parquet/arrow/writer.cc b/src/parquet/arrow/writer.cc
index 85d5bd3f..5040e0cc 100644
--- a/src/parquet/arrow/writer.cc
+++ b/src/parquet/arrow/writer.cc
@@ -31,18 +31,18 @@
using arrow::Array;
using arrow::BinaryArray;
+using arrow::BooleanArray;
using arrow::ChunkedArray;
-using arrow::FixedSizeBinaryArray;
using arrow::Decimal128Array;
-using arrow::BooleanArray;
+using arrow::Field;
+using arrow::FixedSizeBinaryArray;
using arrow::Int16Array;
using arrow::Int16Builder;
-using arrow::Field;
+using arrow::ListArray;
using arrow::MemoryPool;
using arrow::NumericArray;
using arrow::PoolBuffer;
using arrow::PrimitiveArray;
-using arrow::ListArray;
using arrow::Status;
using arrow::Table;
using arrow::TimeUnit;
diff --git a/src/parquet/column_reader-test.cc
b/src/parquet/column_reader-test.cc
index 25993254..ebb70f1d 100644
--- a/src/parquet/column_reader-test.cc
+++ b/src/parquet/column_reader-test.cc
@@ -32,9 +32,9 @@
#include "parquet/types.h"
#include "parquet/util/test-common.h"
+using std::shared_ptr;
using std::string;
using std::vector;
-using std::shared_ptr;
namespace parquet {
diff --git a/src/parquet/column_reader.cc b/src/parquet/column_reader.cc
index 4c114397..10d72109 100644
--- a/src/parquet/column_reader.cc
+++ b/src/parquet/column_reader.cc
@@ -111,8 +111,6 @@ class SerializedPageReader : public PageReader {
decompressor_ = GetCodecFromArrow(codec);
}
- virtual ~SerializedPageReader() {}
-
// Implement the PageReader interface
std::shared_ptr<Page> NextPage() override;
diff --git a/src/parquet/column_reader.h b/src/parquet/column_reader.h
index 6158cb3b..0d5f6ecc 100644
--- a/src/parquet/column_reader.h
+++ b/src/parquet/column_reader.h
@@ -216,7 +216,6 @@ class PARQUET_EXPORT TypedColumnReader : public
ColumnReader {
TypedColumnReader(const ColumnDescriptor* schema,
std::unique_ptr<PageReader> pager,
::arrow::MemoryPool* pool = ::arrow::default_memory_pool())
: ColumnReader(schema, std::move(pager), pool),
current_decoder_(nullptr) {}
- virtual ~TypedColumnReader() {}
// Read a batch of repetition levels, definition levels, and values from the
// column.
diff --git a/src/parquet/column_scanner-test.cc
b/src/parquet/column_scanner-test.cc
index 0cebdc0a..1ebc7196 100644
--- a/src/parquet/column_scanner-test.cc
+++ b/src/parquet/column_scanner-test.cc
@@ -32,9 +32,9 @@
#include "parquet/types.h"
#include "parquet/util/test-common.h"
+using std::shared_ptr;
using std::string;
using std::vector;
-using std::shared_ptr;
namespace parquet {
diff --git a/src/parquet/column_writer.cc b/src/parquet/column_writer.cc
index 6d6347aa..4f2ef6c4 100644
--- a/src/parquet/column_writer.cc
+++ b/src/parquet/column_writer.cc
@@ -141,8 +141,6 @@ class SerializedPageWriter : public PageWriter {
compressor_ = GetCodecFromArrow(codec);
}
- virtual ~SerializedPageWriter() = default;
-
int64_t WriteDictionaryPage(const DictionaryPage& page) override {
int64_t uncompressed_size = page.size();
std::shared_ptr<Buffer> compressed_data = nullptr;
@@ -462,8 +460,9 @@
TypedColumnWriter<Type>::TypedColumnWriter(ColumnChunkMetaDataBuilder* metadata,
std::unique_ptr<PageWriter> pager,
Encoding::type encoding,
const WriterProperties* properties)
- : ColumnWriter(metadata, std::move(pager), (encoding ==
Encoding::PLAIN_DICTIONARY ||
- encoding ==
Encoding::RLE_DICTIONARY),
+ : ColumnWriter(metadata, std::move(pager),
+ (encoding == Encoding::PLAIN_DICTIONARY ||
+ encoding == Encoding::RLE_DICTIONARY),
encoding, properties) {
switch (encoding) {
case Encoding::PLAIN:
diff --git a/src/parquet/encoding-benchmark.cc
b/src/parquet/encoding-benchmark.cc
index 9556fd1d..ca12c6a8 100644
--- a/src/parquet/encoding-benchmark.cc
+++ b/src/parquet/encoding-benchmark.cc
@@ -20,8 +20,8 @@
#include "parquet/encoding-internal.h"
#include "parquet/util/memory.h"
-using arrow::default_memory_pool;
using arrow::MemoryPool;
+using arrow::default_memory_pool;
namespace parquet {
diff --git a/src/parquet/encoding-internal.h b/src/parquet/encoding-internal.h
index 3e9a16d0..894410f0 100644
--- a/src/parquet/encoding-internal.h
+++ b/src/parquet/encoding-internal.h
@@ -467,7 +467,7 @@ class DictEncoder : public Encoder<DType> {
}
}
- virtual ~DictEncoder() { DCHECK(buffered_indices_.empty()); }
+ ~DictEncoder() override { DCHECK(buffered_indices_.empty()); }
// TODO(wesm): think about how to address the construction semantics in
// encodings/dictionary-encoding.h
@@ -482,8 +482,9 @@ class DictEncoder : public Encoder<DType> {
// reserve
// an extra "RleEncoder::MinBufferSize" bytes. These extra bytes won't be
used
// but not reserving them would cause the encoder to fail.
- return 1 + ::arrow::RleEncoder::MaxBufferSize(
- bit_width(), static_cast<int>(buffered_indices_.size())) +
+ return 1 +
+ ::arrow::RleEncoder::MaxBufferSize(
+ bit_width(), static_cast<int>(buffered_indices_.size())) +
::arrow::RleEncoder::MinBufferSize(bit_width());
}
diff --git a/src/parquet/encoding-test.cc b/src/parquet/encoding-test.cc
index a658cb2f..0da32cf0 100644
--- a/src/parquet/encoding-test.cc
+++ b/src/parquet/encoding-test.cc
@@ -30,8 +30,8 @@
#include "parquet/util/memory.h"
#include "parquet/util/test-common.h"
-using arrow::default_memory_pool;
using arrow::MemoryPool;
+using arrow::default_memory_pool;
using std::string;
using std::vector;
diff --git a/src/parquet/file_reader.cc b/src/parquet/file_reader.cc
index 7b748120..e3280c60 100644
--- a/src/parquet/file_reader.cc
+++ b/src/parquet/file_reader.cc
@@ -146,7 +146,7 @@ class SerializedFile : public ParquetFileReader::Contents {
const ReaderProperties& props = default_reader_properties())
: source_(std::move(source)), properties_(props) {}
- ~SerializedFile() {
+ ~SerializedFile() override {
try {
Close();
} catch (...) {
diff --git a/src/parquet/file_writer.cc b/src/parquet/file_writer.cc
index 87ee4f60..1e4a09e2 100644
--- a/src/parquet/file_writer.cc
+++ b/src/parquet/file_writer.cc
@@ -198,7 +198,7 @@ class FileSerializer : public ParquetFileWriter::Contents {
return row_group_writer_.get();
}
- ~FileSerializer() {
+ ~FileSerializer() override {
try {
Close();
} catch (...) {
diff --git a/src/parquet/schema.cc b/src/parquet/schema.cc
index 826ef766..cbe72c64 100644
--- a/src/parquet/schema.cc
+++ b/src/parquet/schema.cc
@@ -430,7 +430,6 @@ class SchemaVisitor : public Node::ConstVisitor {
public:
explicit SchemaVisitor(std::vector<format::SchemaElement>* elements)
: elements_(elements) {}
- virtual ~SchemaVisitor() {}
void Visit(const Node* node) override {
format::SchemaElement element;
@@ -593,10 +592,10 @@ void PrintSchema(const Node* schema, std::ostream&
stream, int indent_width) {
} // namespace schema
using schema::ColumnPath;
+using schema::GroupNode;
using schema::Node;
using schema::NodePtr;
using schema::PrimitiveNode;
-using schema::GroupNode;
void SchemaDescriptor::Init(std::unique_ptr<schema::Node> schema) {
Init(NodePtr(schema.release()));
@@ -606,7 +605,6 @@ class SchemaUpdater : public Node::Visitor {
public:
explicit SchemaUpdater(const std::vector<ColumnOrder>& column_orders)
: column_orders_(column_orders), leaf_count_(0) {}
- virtual ~SchemaUpdater() {}
void Visit(Node* node) override {
if (node->is_group()) {
diff --git a/src/parquet/statistics-test.cc b/src/parquet/statistics-test.cc
index ec8f90a7..1bbef26e 100644
--- a/src/parquet/statistics-test.cc
+++ b/src/parquet/statistics-test.cc
@@ -36,14 +36,14 @@
#include "parquet/types.h"
#include "parquet/util/memory.h"
-using arrow::default_memory_pool;
using arrow::MemoryPool;
+using arrow::default_memory_pool;
namespace parquet {
+using schema::GroupNode;
using schema::NodePtr;
using schema::PrimitiveNode;
-using schema::GroupNode;
namespace test {
diff --git a/src/parquet/statistics.cc b/src/parquet/statistics.cc
index 4c696322..416557c0 100644
--- a/src/parquet/statistics.cc
+++ b/src/parquet/statistics.cc
@@ -23,8 +23,8 @@
#include "parquet/statistics.h"
#include "parquet/util/memory.h"
-using arrow::default_memory_pool;
using arrow::MemoryPool;
+using arrow::default_memory_pool;
namespace parquet {
diff --git a/src/parquet/test-util.h b/src/parquet/test-util.h
index ac6d0a12..a507dfbe 100644
--- a/src/parquet/test-util.h
+++ b/src/parquet/test-util.h
@@ -37,8 +37,8 @@
#include "parquet/util/memory.h"
#include "parquet/util/test-common.h"
-using std::vector;
using std::shared_ptr;
+using std::vector;
namespace parquet {
diff --git a/src/parquet/util/comparison.h b/src/parquet/util/comparison.h
index 12be7baf..7070a0f3 100644
--- a/src/parquet/util/comparison.h
+++ b/src/parquet/util/comparison.h
@@ -38,7 +38,6 @@ class PARQUET_EXPORT CompareDefault : public Comparator {
public:
typedef typename DType::c_type T;
CompareDefault() {}
- virtual ~CompareDefault() {}
virtual bool operator()(const T& a, const T& b) { return a < b; }
};
@@ -46,7 +45,6 @@ template <>
class PARQUET_EXPORT CompareDefault<Int96Type> : public Comparator {
public:
CompareDefault() {}
- virtual ~CompareDefault() {}
virtual bool operator()(const Int96& a, const Int96& b) {
// Only the MSB bit is by Signed comparison
// For little-endian, this is the last bit of Int96 type
@@ -65,7 +63,6 @@ template <>
class PARQUET_EXPORT CompareDefault<ByteArrayType> : public Comparator {
public:
CompareDefault() {}
- virtual ~CompareDefault() {}
virtual bool operator()(const ByteArray& a, const ByteArray& b) {
const int8_t* aptr = reinterpret_cast<const int8_t*>(a.ptr);
const int8_t* bptr = reinterpret_cast<const int8_t*>(b.ptr);
@@ -77,7 +74,6 @@ template <>
class PARQUET_EXPORT CompareDefault<FLBAType> : public Comparator {
public:
explicit CompareDefault(int length) : type_length_(length) {}
- virtual ~CompareDefault() {}
virtual bool operator()(const FLBA& a, const FLBA& b) {
const int8_t* aptr = reinterpret_cast<const int8_t*>(a.ptr);
const int8_t* bptr = reinterpret_cast<const int8_t*>(b.ptr);
@@ -117,7 +113,6 @@ PARQUET_EXTERN_TEMPLATE CompareDefault<FLBAType>;
// Define Unsigned Comparators
class PARQUET_EXPORT CompareUnsignedInt32 : public CompareDefaultInt32 {
public:
- virtual ~CompareUnsignedInt32() {}
bool operator()(const int32_t& a, const int32_t& b) override {
const uint32_t ua = a;
const uint32_t ub = b;
@@ -127,7 +122,6 @@ class PARQUET_EXPORT CompareUnsignedInt32 : public
CompareDefaultInt32 {
class PARQUET_EXPORT CompareUnsignedInt64 : public CompareDefaultInt64 {
public:
- virtual ~CompareUnsignedInt64() {}
bool operator()(const int64_t& a, const int64_t& b) override {
const uint64_t ua = a;
const uint64_t ub = b;
@@ -137,7 +131,6 @@ class PARQUET_EXPORT CompareUnsignedInt64 : public
CompareDefaultInt64 {
class PARQUET_EXPORT CompareUnsignedInt96 : public CompareDefaultInt96 {
public:
- virtual ~CompareUnsignedInt96() {}
bool operator()(const Int96& a, const Int96& b) override {
if (a.value[2] != b.value[2]) {
return (a.value[2] < b.value[2]);
@@ -150,7 +143,6 @@ class PARQUET_EXPORT CompareUnsignedInt96 : public
CompareDefaultInt96 {
class PARQUET_EXPORT CompareUnsignedByteArray : public CompareDefaultByteArray
{
public:
- virtual ~CompareUnsignedByteArray() {}
bool operator()(const ByteArray& a, const ByteArray& b) override {
const uint8_t* aptr = reinterpret_cast<const uint8_t*>(a.ptr);
const uint8_t* bptr = reinterpret_cast<const uint8_t*>(b.ptr);
@@ -161,7 +153,6 @@ class PARQUET_EXPORT CompareUnsignedByteArray : public
CompareDefaultByteArray {
class PARQUET_EXPORT CompareUnsignedFLBA : public CompareDefaultFLBA {
public:
explicit CompareUnsignedFLBA(int length) : CompareDefaultFLBA(length) {}
- virtual ~CompareUnsignedFLBA() {}
bool operator()(const FLBA& a, const FLBA& b) override {
const uint8_t* aptr = reinterpret_cast<const uint8_t*>(a.ptr);
const uint8_t* bptr = reinterpret_cast<const uint8_t*>(b.ptr);
diff --git a/src/parquet/util/memory-test.cc b/src/parquet/util/memory-test.cc
index ee5fe313..17ade21e 100644
--- a/src/parquet/util/memory-test.cc
+++ b/src/parquet/util/memory-test.cc
@@ -27,8 +27,8 @@
#include "parquet/util/memory.h"
#include "parquet/util/test-common.h"
-using arrow::default_memory_pool;
using arrow::MemoryPool;
+using arrow::default_memory_pool;
namespace parquet {
diff --git a/src/parquet/util/schema-util.h b/src/parquet/util/schema-util.h
index ef9087b3..4e31d3ca 100644
--- a/src/parquet/util/schema-util.h
+++ b/src/parquet/util/schema-util.h
@@ -26,12 +26,12 @@
#include "parquet/schema.h"
#include "parquet/types.h"
+using parquet::LogicalType;
using parquet::ParquetException;
using parquet::SchemaDescriptor;
using parquet::schema::GroupNode;
-using parquet::schema::NodePtr;
using parquet::schema::Node;
-using parquet::LogicalType;
+using parquet::schema::NodePtr;
inline bool str_endswith_tuple(const std::string& str) {
if (str.size() >= 6) {
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
> [C++] Fix new build warnings with clang 5.0
> -------------------------------------------
>
> Key: PARQUET-1226
> URL: https://issues.apache.org/jira/browse/PARQUET-1226
> Project: Parquet
> Issue Type: Bug
> Components: parquet-cpp
> Reporter: Wes McKinney
> Assignee: Wes McKinney
> Priority: Major
> Fix For: cpp-1.4.0
>
>
> Follow-on work since Apache Arrow has migrated to clang 5.0 in ARROW-2117
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)