This is an automated email from the ASF dual-hosted git repository.

weibin pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-graphar.git


The following commit(s) were added to refs/heads/main by this push:
     new 82a02f1  feat(c++): Update the Catch2 dependency to v3 and remove the Catch2 submodule (#511)
82a02f1 is described below

commit 82a02f17a503332e249654b84a0a0ca9798e40ac
Author: Weibin Zeng <[email protected]>
AuthorDate: Tue Jun 4 18:03:23 2024 +0800

    feat(c++): Update the Catch2 dependency to v3 and remove the Catch2 submodule (#511)
---
 .github/workflows/ci.yml            |   7 +
 .gitmodules                         |   3 -
 cpp/Brewfile                        |   1 +
 cpp/CMakeLists.txt                  |  19 +--
 cpp/README.md                       |   1 +
 cpp/test/test_arrow_chunk_reader.cc |  22 ++--
 cpp/test/test_arrow_chunk_writer.cc | 250 ++++++++++++++++++------------------
 cpp/test/test_builder.cc            |   4 +-
 cpp/test/test_chunk_info_reader.cc  |   3 +-
 cpp/test/test_graph.cc              |   3 +-
 cpp/test/test_info.cc               |   5 +-
 cpp/thirdparty/Catch2               |   1 -
 12 files changed, 157 insertions(+), 162 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e81b49e..dbf3e0e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -69,6 +69,13 @@ jobs:
         sudo make install
         popd
 
+        # install Catch2 v3
+        git clone --branch v3.6.0 https://github.com/catchorg/Catch2.git --depth 1
+        pushd Catch2
+        cmake -Bbuild -H. -DBUILD_TESTING=OFF
+        sudo cmake --build build/ --target install
+        popd
+
     - name: CMake
       working-directory: "cpp"
       run: |
diff --git a/.gitmodules b/.gitmodules
index 2168eb6..7be3ea6 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,6 +1,3 @@
 [submodule "testing"]
        path = testing
        url = https://github.com/apache/incubator-graphar-testing.git
-[submodule "cpp/thirdparty/Catch2"]
-       path = cpp/thirdparty/Catch2
-       url = https://github.com/catchorg/Catch2.git
diff --git a/cpp/Brewfile b/cpp/Brewfile
index 229975a..889d3c9 100644
--- a/cpp/Brewfile
+++ b/cpp/Brewfile
@@ -22,3 +22,4 @@ brew "boost"
 brew "doxygen"
 brew "git"
 brew "ccache"
+brew "catch2"
diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt
index 6b84311..fe81d18 100644
--- a/cpp/CMakeLists.txt
+++ b/cpp/CMakeLists.txt
@@ -159,15 +159,6 @@ macro(find_yaml_cpp)
     set(CMAKE_WARN_DEPRECATED ON CACHE BOOL "" FORCE)
 endmacro()
 
-macro(find_catch2)
-    set(MESSAGE_QUIET ON)
-    set(CMAKE_WARN_DEPRECATED OFF CACHE BOOL "" FORCE)
-    add_subdirectory_shared(thirdparty/Catch2)
-    list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/thirdparty/Catch2/contrib")
-    unset(MESSAGE_QUIET)
-    set(CMAKE_WARN_DEPRECATED ON CACHE BOOL "" FORCE)
-endmacro()
-
 macro(install_graphar_target target)
   # install
   install(TARGETS ${target}
@@ -226,7 +217,7 @@ macro(build_graphar)
                                           $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/include>
     )
    target_include_directories(graphar PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/thirdparty)
-    target_link_libraries(graphar PRIVATE Threads::Threads ${CMAKE_DL_LIBS})
+    target_link_libraries(graphar PRIVATE ${CMAKE_DL_LIBS})
 
     if(APPLE)
        target_link_libraries(graphar PRIVATE -Wl,-force_load Arrow::arrow_shared
@@ -261,7 +252,7 @@ if (BUILD_EXAMPLES)
                                                     $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/thirdparty/Catch2/single_include>
         )
        target_include_directories(${E_NAME} SYSTEM PRIVATE ${Boost_INCLUDE_DIRS})
-        target_link_libraries(${E_NAME} PRIVATE graphar ${Boost_LIBRARIES} Threads::Threads ${CMAKE_DL_LIBS})
+        target_link_libraries(${E_NAME} PRIVATE graphar ${Boost_LIBRARIES} ${CMAKE_DL_LIBS})
         if(APPLE)
             target_link_libraries(${E_NAME} PRIVATE Arrow::arrow_shared
                 Parquet::parquet_shared)
@@ -309,7 +300,7 @@ install(EXPORT graphar-targets
 # Test targets
 # ------------------------------------------------------------------------------
 if (BUILD_TESTS)
-    find_catch2()
+    find_package(Catch2 3 REQUIRED)
 
     macro(add_test target)
         set(options)
@@ -319,7 +310,7 @@ if (BUILD_TESTS)
         add_executable(${target} ${add_test_SRCS})
         target_compile_features(${target} PRIVATE cxx_std_17)
        target_include_directories(${target} PRIVATE ${PROJECT_SOURCE_DIR}/thirdparty)
-        target_link_libraries(${target} PRIVATE Catch2::Catch2 graphar Threads::Threads ${CMAKE_DL_LIBS})
+        target_link_libraries(${target} PRIVATE Catch2::Catch2WithMain graphar ${CMAKE_DL_LIBS})
         if(APPLE)
             target_link_libraries(${target} PRIVATE Arrow::arrow_shared
                 Parquet::parquet_shared)
@@ -354,7 +345,7 @@ if (BUILD_BENCHMARKS)
         add_executable(${target} ${add_test_SRCS})
         target_compile_features(${target} PRIVATE cxx_std_17)
        target_include_directories(${target} PRIVATE ${PROJECT_SOURCE_DIR}/thirdparty)
-        target_link_libraries(${target} PRIVATE benchmark::benchmark_main graphar Threads::Threads ${CMAKE_DL_LIBS})
+        target_link_libraries(${target} PRIVATE benchmark::benchmark_main graphar ${CMAKE_DL_LIBS})
     endmacro()
    add_benchmark(arrow_chunk_reader_benchmark SRCS benchmarks/arrow_chunk_reader_benchmark.cc)
     add_benchmark(graph_info_benchmark SRCS benchmarks/graph_info_benchmark.cc)
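
With the changes above, Catch2 v3 is consumed via find_package(Catch2 3 REQUIRED) and the test executables link against Catch2::Catch2WithMain, which supplies the main() entry point. A test translation unit therefore only needs the test-macro header and no CATCH_CONFIG_MAIN definition. The following minimal sketch is hypothetical (not part of this commit) and only illustrates that setup:

    // minimal_test.cc -- hypothetical example, not taken from this commit.
    // main() comes from the linked Catch2::Catch2WithMain target, so there is
    // no CATCH_CONFIG_MAIN macro and no <catch2/catch.hpp> umbrella header.
    #include <catch2/catch_test_macros.hpp>

    TEST_CASE("MinimalExample") {
      int sum = 1 + 2;
      REQUIRE(sum == 3);

      SECTION("AlsoChecksInequality") {
        REQUIRE(sum != 4);
      }
    }
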
diff --git a/cpp/README.md b/cpp/README.md
index ec06146..38a9eab 100644
--- a/cpp/README.md
+++ b/cpp/README.md
@@ -32,6 +32,7 @@ Dependencies for optional features:
 - `clang-format-8` for code formatting
 - [BGL](https://www.boost.org/doc/libs/1_80_0/libs/graph/doc/index.html) (>= 1.58)
 - [Google Benchmark](https://github.com/google/benchmark) (>= 1.6.0) for benchmarking
+- [Catch2](https://github.com/catchorg/Catch2) v3 for unit testing
 
 On Ubuntu/Debian, you can install the required packages with:
 
diff --git a/cpp/test/test_arrow_chunk_reader.cc b/cpp/test/test_arrow_chunk_reader.cc
index c2b4a23..513216d 100644
--- a/cpp/test/test_arrow_chunk_reader.cc
+++ b/cpp/test/test_arrow_chunk_reader.cc
@@ -29,9 +29,7 @@
 #include "graphar/util/filesystem.h"
 #include "graphar/util/general_params.h"
 
-#define CATCH_CONFIG_MAIN
-#include <catch2/catch.hpp>
-
+#include <catch2/catch_test_macros.hpp>
 namespace graphar {
 
 TEST_CASE("ArrowChunkReader") {
@@ -133,7 +131,9 @@ TEST_CASE("ArrowChunkReader") {
     SECTION("PropertyPushDown") {
       std::string filter_property = "gender";
       auto filter = _Equal(_Property(filter_property), _Literal("female"));
-      std::vector<std::string> expected_cols{"firstName", "lastName"};
+      std::vector<std::string> expected_cols;
+      expected_cols.push_back("firstName");
+      expected_cols.push_back("lastName");
       // print reader result
       auto walkReader =
           [&](std::shared_ptr<VertexPropertyArrowChunkReader>& reader) {
@@ -150,14 +150,16 @@ TEST_CASE("ArrowChunkReader") {
               sum += table->num_rows();
             } while (!reader->next_chunk().IsIndexError());
             REQUIRE(idx == reader->GetChunkNum());
-            REQUIRE(table->num_columns() == (int) expected_cols.size());
+            REQUIRE(table->num_columns() ==
+                    static_cast<int>(expected_cols.size()));
 
             std::cout << "Total Nums: " << sum << "/"
                       << reader->GetChunkNum() * vertex_info->GetChunkSize()
                       << '\n';
             std::cout << "Column Nums: " << table->num_columns() << "\n";
             std::cout << "Column Names: ";
-            for (int i = 0; i < table->num_columns(); i++) {
+            for (int i = 0;
+                 i < table->num_columns() && i < expected_cols.size(); i++) {
               REQUIRE(table->ColumnNames()[i] == expected_cols[i]);
               std::cout << "`" << table->ColumnNames()[i] << "` ";
             }
@@ -205,10 +207,11 @@ TEST_CASE("ArrowChunkReader") {
       SECTION("pushdown column that don't exist") {
         std::cout << "Vertex property pushdown column that don't exist:\n";
         auto filter = _Literal(true);
-        std::vector<std::string> expected_cols{"id"};
+        std::vector<std::string> expected_cols_2;
+        expected_cols_2.push_back("id");
         util::FilterOptions options;
         options.filter = filter;
-        options.columns = expected_cols;
+        options.columns = expected_cols_2;
         auto maybe_reader = VertexPropertyArrowChunkReader::Make(
             graph_info, src_label, filter_property, options);
         REQUIRE(maybe_reader.status().ok());
@@ -355,7 +358,8 @@ TEST_CASE("ArrowChunkReader") {
           _Equal(_Property(edge_property_name), _Property(edge_property_name));
       auto filter = _And(expr1, expr2);
 
-      std::vector<std::string> expected_cols{"creationDate"};
+      std::vector<std::string> expected_cols;
+      expected_cols.push_back("creationDate");
 
       util::FilterOptions options;
       options.filter = filter;
diff --git a/cpp/test/test_arrow_chunk_writer.cc b/cpp/test/test_arrow_chunk_writer.cc
index ecb69a9..3978ba9 100644
--- a/cpp/test/test_arrow_chunk_writer.cc
+++ b/cpp/test/test_arrow_chunk_writer.cc
@@ -39,12 +39,11 @@
 #include "graphar/util/yaml.h"
 #include "graphar/writer/arrow_chunk_writer.h"
 
-#define CATCH_CONFIG_MAIN
-#include <catch2/catch.hpp>
+#include <catch2/catch_test_macros.hpp>
 
 namespace graphar {
 
-TEST_CASE("test_vertex_property_writer_from_file") {
+TEST_CASE("TestVertexPropertyWriter") {
   std::string path = TEST_DATA_DIR + "/ldbc_sample/person_0_0.csv";
   arrow::io::IOContext io_context = arrow::io::default_io_context();
 
@@ -116,141 +115,142 @@ TEST_CASE("test_vertex_property_writer_from_file") {
   // Invalid data type
   auto pg3 = vertex_info->GetPropertyGroup("id");
   REQUIRE(writer->WriteTable(tmp_table, pg3, 0).IsTypeError());
-}
 
-TEST_CASE("test_orc_and_parquet_reader") {
-  arrow::Status st;
-  arrow::MemoryPool* pool = arrow::default_memory_pool();
-  std::string path1 = TEST_DATA_DIR + "/ldbc_sample/orc" +
-                      "/vertex/person/firstName_lastName_gender/chunk1";
-  std::string path2 = TEST_DATA_DIR + "/ldbc_sample/parquet" +
-                      "/vertex/person/firstName_lastName_gender/chunk1";
-  arrow::io::IOContext io_context = arrow::io::default_io_context();
+  SECTION("TestOrcParquetReader") {
+    arrow::Status st;
+    arrow::MemoryPool* pool = arrow::default_memory_pool();
+    std::string path1 = TEST_DATA_DIR + "/ldbc_sample/orc" +
+                        "/vertex/person/firstName_lastName_gender/chunk1";
+    std::string path2 = TEST_DATA_DIR + "/ldbc_sample/parquet" +
+                        "/vertex/person/firstName_lastName_gender/chunk1";
+    arrow::io::IOContext io_context = arrow::io::default_io_context();
 
-  // Open ORC file reader
-  auto fs1 = arrow::fs::FileSystemFromUriOrPath(path1).ValueOrDie();
-  std::shared_ptr<arrow::io::RandomAccessFile> input1 =
-      fs1->OpenInputFile(path1).ValueOrDie();
-  std::unique_ptr<arrow::adapters::orc::ORCFileReader> reader =
-      arrow::adapters::orc::ORCFileReader::Open(input1, pool).ValueOrDie();
+    // Open ORC file reader
+    auto fs1 = arrow::fs::FileSystemFromUriOrPath(path1).ValueOrDie();
+    std::shared_ptr<arrow::io::RandomAccessFile> input1 =
+        fs1->OpenInputFile(path1).ValueOrDie();
+    std::unique_ptr<arrow::adapters::orc::ORCFileReader> reader =
+        arrow::adapters::orc::ORCFileReader::Open(input1, pool).ValueOrDie();
 
-  // Read entire file as a single Arrow table
-  auto maybe_table = reader->Read();
-  std::shared_ptr<arrow::Table> table1 = maybe_table.ValueOrDie();
+    // Read entire file as a single Arrow table
+    auto maybe_table = reader->Read();
+    std::shared_ptr<arrow::Table> table1 = maybe_table.ValueOrDie();
 
-  // Open Parquet file reader
-  auto fs2 = arrow::fs::FileSystemFromUriOrPath(path2).ValueOrDie();
-  std::shared_ptr<arrow::io::RandomAccessFile> input2 =
-      fs2->OpenInputFile(path2).ValueOrDie();
-  std::unique_ptr<parquet::arrow::FileReader> arrow_reader;
-  st = parquet::arrow::OpenFile(input2, pool, &arrow_reader);
+    // Open Parquet file reader
+    auto fs2 = arrow::fs::FileSystemFromUriOrPath(path2).ValueOrDie();
+    std::shared_ptr<arrow::io::RandomAccessFile> input2 =
+        fs2->OpenInputFile(path2).ValueOrDie();
+    std::unique_ptr<parquet::arrow::FileReader> arrow_reader;
+    st = parquet::arrow::OpenFile(input2, pool, &arrow_reader);
 
-  // Read entire file as a single Arrow table
-  std::shared_ptr<arrow::Table> table2;
-  st = arrow_reader->ReadTable(&table2);
+    // Read entire file as a single Arrow table
+    std::shared_ptr<arrow::Table> table2;
+    st = arrow_reader->ReadTable(&table2);
 
-  REQUIRE(table1->GetColumnByName("firstName")->ToString() ==
-          table2->GetColumnByName("firstName")->ToString());
-  REQUIRE(table1->GetColumnByName("lastName")->ToString() ==
-          table2->GetColumnByName("lastName")->ToString());
-  REQUIRE(table1->GetColumnByName("gender")->ToString() ==
-          table2->GetColumnByName("gender")->ToString());
-}
+    REQUIRE(table1->GetColumnByName("firstName")->ToString() ==
+            table2->GetColumnByName("firstName")->ToString());
+    REQUIRE(table1->GetColumnByName("lastName")->ToString() ==
+            table2->GetColumnByName("lastName")->ToString());
+    REQUIRE(table1->GetColumnByName("gender")->ToString() ==
+            table2->GetColumnByName("gender")->ToString());
+  }
 
-TEST_CASE("test_edge_chunk_writer") {
-  arrow::Status st;
-  arrow::MemoryPool* pool = arrow::default_memory_pool();
-  std::string path = TEST_DATA_DIR +
-                     "/ldbc_sample/parquet/edge/person_knows_person/"
-                     "unordered_by_source/adj_list/part0/chunk0";
-  auto fs = arrow::fs::FileSystemFromUriOrPath(path).ValueOrDie();
-  std::shared_ptr<arrow::io::RandomAccessFile> input =
-      fs->OpenInputFile(path).ValueOrDie();
-  std::unique_ptr<parquet::arrow::FileReader> arrow_reader;
-  st = parquet::arrow::OpenFile(input, pool, &arrow_reader);
-  // Read entire file as a single Arrow table
-  std::shared_ptr<arrow::Table> maybe_table;
-  st = arrow_reader->ReadTable(&maybe_table);
-  REQUIRE(st.ok());
+  SECTION("TestEdgeChunkWriter") {
+    arrow::Status st;
+    arrow::MemoryPool* pool = arrow::default_memory_pool();
+    std::string path = TEST_DATA_DIR +
+                       "/ldbc_sample/parquet/edge/person_knows_person/"
+                       "unordered_by_source/adj_list/part0/chunk0";
+    auto fs = arrow::fs::FileSystemFromUriOrPath(path).ValueOrDie();
+    std::shared_ptr<arrow::io::RandomAccessFile> input =
+        fs->OpenInputFile(path).ValueOrDie();
+    std::unique_ptr<parquet::arrow::FileReader> arrow_reader;
+    st = parquet::arrow::OpenFile(input, pool, &arrow_reader);
+    // Read entire file as a single Arrow table
+    std::shared_ptr<arrow::Table> maybe_table;
+    st = arrow_reader->ReadTable(&maybe_table);
+    REQUIRE(st.ok());
 
-  std::shared_ptr<arrow::Table> table =
-      maybe_table
-          ->RenameColumns(
-              {GeneralParams::kSrcIndexCol, GeneralParams::kDstIndexCol})
-          .ValueOrDie();
-  std::cout << table->schema()->ToString() << std::endl;
-  std::cout << table->num_rows() << ' ' << table->num_columns() << std::endl;
+    std::shared_ptr<arrow::Table> table =
+        maybe_table
+            ->RenameColumns(
+                {GeneralParams::kSrcIndexCol, GeneralParams::kDstIndexCol})
+            .ValueOrDie();
+    std::cout << table->schema()->ToString() << std::endl;
+    std::cout << table->num_rows() << ' ' << table->num_columns() << std::endl;
 
-  // Construct the writer
-  std::string edge_meta_file =
-      TEST_DATA_DIR + "/ldbc_sample/csv/" + "person_knows_person.edge.yml";
-  auto edge_meta = Yaml::LoadFile(edge_meta_file).value();
-  auto edge_info = EdgeInfo::Load(edge_meta).value();
-  auto adj_list_type = AdjListType::ordered_by_source;
-  auto maybe_writer = EdgeChunkWriter::Make(edge_info, "/tmp/", adj_list_type);
-  REQUIRE(!maybe_writer.has_error());
-  auto writer = maybe_writer.value();
+    // Construct the writer
+    std::string edge_meta_file =
+        TEST_DATA_DIR + "/ldbc_sample/csv/" + "person_knows_person.edge.yml";
+    auto edge_meta = Yaml::LoadFile(edge_meta_file).value();
+    auto edge_info = EdgeInfo::Load(edge_meta).value();
+    auto adj_list_type = AdjListType::ordered_by_source;
+    auto maybe_writer =
+        EdgeChunkWriter::Make(edge_info, "/tmp/", adj_list_type);
+    REQUIRE(!maybe_writer.has_error());
+    auto writer = maybe_writer.value();
 
-  // Get & set validate level
-  REQUIRE(writer->GetValidateLevel() == ValidateLevel::no_validate);
-  writer->SetValidateLevel(ValidateLevel::strong_validate);
-  REQUIRE(writer->GetValidateLevel() == ValidateLevel::strong_validate);
+    // Get & set validate level
+    REQUIRE(writer->GetValidateLevel() == ValidateLevel::no_validate);
+    writer->SetValidateLevel(ValidateLevel::strong_validate);
+    REQUIRE(writer->GetValidateLevel() == ValidateLevel::strong_validate);
 
-  // Valid cases
-  // Write adj list of vertex chunk 0 to files
-  REQUIRE(writer->SortAndWriteAdjListTable(table, 0, 0).ok());
-  // Write number of edges for vertex chunk 0
-  REQUIRE(writer->WriteEdgesNum(0, table->num_rows()).ok());
-  // Write number of vertices
-  REQUIRE(writer->WriteVerticesNum(903).ok());
+    // Valid cases
+    // Write adj list of vertex chunk 0 to files
+    REQUIRE(writer->SortAndWriteAdjListTable(table, 0, 0).ok());
+    // Write number of edges for vertex chunk 0
+    REQUIRE(writer->WriteEdgesNum(0, table->num_rows()).ok());
+    // Write number of vertices
+    REQUIRE(writer->WriteVerticesNum(903).ok());
 
-  // Check the number of edges
-  std::shared_ptr<arrow::io::InputStream> input2 =
-      fs->OpenInputStream(
-            "/tmp/edge/person_knows_person/ordered_by_source/edge_count0")
-          .ValueOrDie();
-  auto edge_num = input2->Read(sizeof(IdType)).ValueOrDie();
-  const IdType* edge_num_ptr =
-      reinterpret_cast<const IdType*>(edge_num->data());
-  REQUIRE((*edge_num_ptr) == table->num_rows());
+    // Check the number of edges
+    std::shared_ptr<arrow::io::InputStream> input2 =
+        fs->OpenInputStream(
+              "/tmp/edge/person_knows_person/ordered_by_source/edge_count0")
+            .ValueOrDie();
+    auto edge_num = input2->Read(sizeof(IdType)).ValueOrDie();
+    const IdType* edge_num_ptr =
+        reinterpret_cast<const IdType*>(edge_num->data());
+    REQUIRE((*edge_num_ptr) == table->num_rows());
 
-  // Check the number of vertices
-  std::shared_ptr<arrow::io::InputStream> input3 =
-      fs->OpenInputStream(
-            "/tmp/edge/person_knows_person/ordered_by_source/vertex_count")
-          .ValueOrDie();
-  auto vertex_num = input3->Read(sizeof(IdType)).ValueOrDie();
-  const IdType* vertex_num_ptr =
-      reinterpret_cast<const IdType*>(vertex_num->data());
-  REQUIRE((*vertex_num_ptr) == 903);
+    // Check the number of vertices
+    std::shared_ptr<arrow::io::InputStream> input3 =
+        fs->OpenInputStream(
+              "/tmp/edge/person_knows_person/ordered_by_source/vertex_count")
+            .ValueOrDie();
+    auto vertex_num = input3->Read(sizeof(IdType)).ValueOrDie();
+    const IdType* vertex_num_ptr =
+        reinterpret_cast<const IdType*>(vertex_num->data());
+    REQUIRE((*vertex_num_ptr) == 903);
 
-  // Invalid cases
-  // Invalid count or index
-  REQUIRE(writer->WriteEdgesNum(-1, 0).IsIndexError());
-  REQUIRE(writer->WriteEdgesNum(0, -1).IsIndexError());
-  REQUIRE(writer->WriteVerticesNum(-1).IsIndexError());
-  // Out of range
-  REQUIRE(writer->WriteOffsetChunk(table, 0).IsInvalid());
-  // Invalid chunk id
-  REQUIRE(writer->WriteAdjListChunk(table, -1, 0).IsIndexError());
-  REQUIRE(writer->WriteAdjListChunk(table, 0, -1).IsIndexError());
-  // Invalid adj list type
-  auto invalid_adj_list_type = AdjListType::unordered_by_dest;
-  auto maybe_writer2 =
-      EdgeChunkWriter::Make(edge_info, "/tmp/", invalid_adj_list_type);
-  REQUIRE(maybe_writer2.has_error());
-  // Invalid property group
-  Property p1("invalid_property", int32(), false);
-  auto pg1 = CreatePropertyGroup({p1}, FileType::CSV);
-  REQUIRE(writer->WritePropertyChunk(table, pg1, 0, 0).IsKeyError());
-  // Property not found in table
-  auto pg2 = edge_info->GetPropertyGroup("creationDate");
-  REQUIRE(writer->WritePropertyChunk(table, pg2, 0, 0).IsInvalid());
-  // Required columns not found
-  std::shared_ptr<arrow::Table> tmp_table =
-      table->RenameColumns({"creationDate", "tmp_property"}).ValueOrDie();
-  REQUIRE(writer->WriteAdjListChunk(tmp_table, 0, 0).IsInvalid());
-  // Invalid data type
-  REQUIRE(writer->WritePropertyChunk(tmp_table, pg2, 0, 0).IsTypeError());
+    // Invalid cases
+    // Invalid count or index
+    REQUIRE(writer->WriteEdgesNum(-1, 0).IsIndexError());
+    REQUIRE(writer->WriteEdgesNum(0, -1).IsIndexError());
+    REQUIRE(writer->WriteVerticesNum(-1).IsIndexError());
+    // Out of range
+    REQUIRE(writer->WriteOffsetChunk(table, 0).IsInvalid());
+    // Invalid chunk id
+    REQUIRE(writer->WriteAdjListChunk(table, -1, 0).IsIndexError());
+    REQUIRE(writer->WriteAdjListChunk(table, 0, -1).IsIndexError());
+    // Invalid adj list type
+    auto invalid_adj_list_type = AdjListType::unordered_by_dest;
+    auto maybe_writer2 =
+        EdgeChunkWriter::Make(edge_info, "/tmp/", invalid_adj_list_type);
+    REQUIRE(maybe_writer2.has_error());
+    // Invalid property group
+    Property p1("invalid_property", int32(), false);
+    auto pg1 = CreatePropertyGroup({p1}, FileType::CSV);
+    REQUIRE(writer->WritePropertyChunk(table, pg1, 0, 0).IsKeyError());
+    // Property not found in table
+    auto pg2 = edge_info->GetPropertyGroup("creationDate");
+    REQUIRE(writer->WritePropertyChunk(table, pg2, 0, 0).IsInvalid());
+    // Required columns not found
+    std::shared_ptr<arrow::Table> tmp_table =
+        table->RenameColumns({"creationDate", "tmp_property"}).ValueOrDie();
+    REQUIRE(writer->WriteAdjListChunk(tmp_table, 0, 0).IsInvalid());
+    // Invalid data type
+    REQUIRE(writer->WritePropertyChunk(tmp_table, pg2, 0, 0).IsTypeError());
+  }
 }
 }  // namespace graphar
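
In the rewritten writer test above, what used to be three separate TEST_CASEs ("test_orc_and_parquet_reader" and "test_edge_chunk_writer" alongside the vertex writer test) now lives in one TEST_CASE with SECTIONs. In Catch2, the enclosing TEST_CASE body is re-entered once per SECTION, so the setup that precedes the SECTIONs runs fresh for each of them. A small hypothetical sketch of that behaviour (not part of this commit):

    // sections_example.cc -- hypothetical sketch, not part of this commit.
    // Catch2 re-runs the TEST_CASE once per SECTION, so the vector below is
    // rebuilt before each SECTION executes.
    #include <vector>

    #include <catch2/catch_test_macros.hpp>

    TEST_CASE("SetupIsRepeatedPerSection") {
      std::vector<int> v{1, 2, 3};  // executed again for every SECTION

      SECTION("PushBackGrowsTheVector") {
        v.push_back(4);
        REQUIRE(v.size() == 4);
      }

      SECTION("ClearEmptiesTheVector") {
        v.clear();
        REQUIRE(v.empty());
      }
    }
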
diff --git a/cpp/test/test_builder.cc b/cpp/test/test_builder.cc
index d80bf56..fed3b83 100644
--- a/cpp/test/test_builder.cc
+++ b/cpp/test/test_builder.cc
@@ -39,9 +39,7 @@
 #include "graphar/writer/edges_builder.h"
 #include "graphar/writer/vertices_builder.h"
 
-#define CATCH_CONFIG_MAIN
-#include <catch2/catch.hpp>
-
+#include <catch2/catch_test_macros.hpp>
 namespace graphar {
 TEST_CASE("test_vertices_builder") {
   std::cout << "Test vertex builder" << std::endl;
diff --git a/cpp/test/test_chunk_info_reader.cc b/cpp/test/test_chunk_info_reader.cc
index c92e93a..90f1371 100644
--- a/cpp/test/test_chunk_info_reader.cc
+++ b/cpp/test/test_chunk_info_reader.cc
@@ -24,8 +24,7 @@
 #include "graphar/reader/chunk_info_reader.h"
 #include "graphar/util/adj_list_type.h"
 
-#define CATCH_CONFIG_MAIN
-#include <catch2/catch.hpp>
+#include <catch2/catch_test_macros.hpp>
 
 namespace graphar {
 
diff --git a/cpp/test/test_graph.cc b/cpp/test/test_graph.cc
index 385e1fd..b939539 100644
--- a/cpp/test/test_graph.cc
+++ b/cpp/test/test_graph.cc
@@ -23,8 +23,7 @@
 #include "graphar/graph.h"
 #include "graphar/util/data_type.h"
 
-#define CATCH_CONFIG_MAIN
-#include <catch2/catch.hpp>
+#include <catch2/catch_test_macros.hpp>
 
 namespace graphar {
 TEST_CASE("Graph") {
diff --git a/cpp/test/test_info.cc b/cpp/test/test_info.cc
index 5d8c17e..9d4ccad 100644
--- a/cpp/test/test_info.cc
+++ b/cpp/test/test_info.cc
@@ -31,8 +31,7 @@
 #include "graphar/util/filesystem.h"
 #include "graphar/util/version_parser.h"
 
-#define CATCH_CONFIG_MAIN
-#include <catch2/catch.hpp>
+#include <catch2/catch_test_macros.hpp>
 
 namespace graphar {
 
@@ -780,7 +779,7 @@ extra_info:
   }
 }
 
-TEST_CASE("LoadFromS3", "[!hide]") {
+TEST_CASE("LoadFromS3", "[.hidden]") {
   std::string path =
       "s3://graphar/ldbc/ldbc.graph.yml"
       "?endpoint_override=graphscope.oss-cn-beijing.aliyuncs.com";
diff --git a/cpp/thirdparty/Catch2 b/cpp/thirdparty/Catch2
deleted file mode 160000
index 20ace55..0000000
--- a/cpp/thirdparty/Catch2
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 20ace5503422a8511036aa9d486435041127e0cf


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
