tolleybot commented on code in PR #34616:
URL: https://github.com/apache/arrow/pull/34616#discussion_r1162926884


##########
cpp/src/arrow/dataset/dataset_encryption_test.cc:
##########
@@ -0,0 +1,247 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/testing/gtest_util.h"
+#include "gtest/gtest.h"
+
+#include <arrow/api.h>
+#include <arrow/dataset/api.h>
+#include "arrow/array/builder_primitive.h"
+#include "arrow/builder.h"
+#include "arrow/dataset/partition.h"
+#include "arrow/filesystem/mockfs.h"
+#include "arrow/status.h"
+#include "arrow/table.h"
+#include "parquet/encryption/dataset_encryption_config.h"
+#include "parquet/encryption/test_in_memory_kms.h"
+
+const char dsFooterMasterKey[] = "0123456789012345";
+const char dsFooterMasterKeyId[] = "footer_key";
+const char* const dsColumnMasterKeys[] = {"1234567890123450"};
+const char* const dsColumnMasterKeyIds[] = {"col_key"};
+
+namespace arrow {
+namespace dataset {
+
+class DatasetEncryptionTest : public ::testing::Test {
+ protected:
+  std::unique_ptr<arrow::internal::TemporaryDir> temp_dir_;
+  std::shared_ptr<::arrow::dataset::InMemoryDataset> dataset_;
+  std::string footer_key_name_ = "footer_key";
+
+  ::parquet::encryption::DatasetEncryptionConfiguration dataset_encryption_config_;
+  ::parquet::encryption::DatasetDecryptionConfiguration dataset_decryption_config_;
+  std::string column_key_mapping_;
+  ::parquet::encryption::KmsConnectionConfig kms_connection_config_;
+  std::shared_ptr<::parquet::encryption::CryptoFactory> crypto_factory_;
+  std::shared_ptr<ParquetFileFormat> file_format_;
+  std::shared_ptr<::arrow::fs::FileSystem> file_system_;
+
+  /** Set up the test: mock file system, sample table, and
+   *  encryption/decryption configuration.
+   */
+  void SetUp() {
+    // create our mock file system
+    ::arrow::fs::TimePoint mock_now = std::chrono::system_clock::now();
+    ASSERT_OK_AND_ASSIGN(file_system_,
+                         ::arrow::fs::internal::MockFileSystem::Make(mock_now, {}));
+    // build our dummy table
+    BuildTable();
+
+    auto key_list = BuildKeyMap(dsColumnMasterKeyIds, dsColumnMasterKeys,
+                                dsFooterMasterKeyId, dsFooterMasterKey);
+
+    SetupCryptoFactory(true, key_list);
+
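+    // Map column "a" to the column master key id "col_key"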
+    column_key_mapping_ = "col_key: a";
+
+    // Set up our dataset encryption and decryption configurations
+    dataset_encryption_config_.crypto_factory = crypto_factory_;
+    dataset_encryption_config_.kms_connection_config =
+        std::make_shared<::parquet::encryption::KmsConnectionConfig>(
+            kms_connection_config_);
+    dataset_encryption_config_.encryption_config =
+        std::make_shared<::parquet::encryption::EncryptionConfiguration>(
+            footer_key_name_);
+    dataset_encryption_config_.encryption_config->column_keys = column_key_mapping_;
+    dataset_encryption_config_.encryption_config->footer_key = footer_key_name_;
+
+    dataset_decryption_config_.crypto_factory = crypto_factory_;
+    dataset_decryption_config_.kms_connection_config =
+        std::make_shared<::parquet::encryption::KmsConnectionConfig>(
+            kms_connection_config_);
+    dataset_decryption_config_.decryption_config =
+        std::make_shared<::parquet::encryption::DecryptionConfiguration>();
+
+    // create our Parquet file format object
+    file_format_ = std::make_shared<ParquetFileFormat>();
+
+    file_format_->SetDatasetEncryptionConfig(
+        std::make_shared<::parquet::encryption::DatasetEncryptionConfiguration>(
+            dataset_encryption_config_));
+    file_format_->SetDatasetDecryptionConfig(
+        std::make_shared<::parquet::encryption::DatasetDecryptionConfiguration>(
+            dataset_decryption_config_));
+  }
+
+  /** Utility to build the key map of master key IDs to key values
+   *
+   */
+  std::unordered_map<std::string, std::string> BuildKeyMap(const char* const* column_ids,
+                                                           const char* const* column_keys,
+                                                           const char* footer_id,
+                                                           const char* footer_key) {
+    std::unordered_map<std::string, std::string> key_map;
+    // add column keys
+    for (int i = 0; i < 1; i++) {
+      key_map.insert({column_ids[i], column_keys[i]});
+    }
+    // add footer key
+    key_map.insert({footer_id, footer_key});
+
+    return key_map;
+  }
+
+  /** Utility to build the column key mapping string
+   *
+   */
+  std::string BuildColumnKeyMapping() {
+    std::ostringstream stream;
+    // The column_keys format is "masterKeyId:columnName;", so use the key id, not the key value
+    stream << dsColumnMasterKeyIds[0] << ":"
+           << "a"
+           << ";";
+    return stream.str();
+  }
+  /** Write the dataset to the (mock) file system
+   *
+   */
+  void WriteDataset() {
+    auto base_path = "";
+    ASSERT_OK(file_system_->CreateDir(base_path));
+    // Write it using Datasets
+    ASSERT_OK_AND_ASSIGN(auto scanner_builder, dataset_->NewScan());
+    ASSERT_OK_AND_ASSIGN(auto scanner, scanner_builder->Finish());
+
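+    // Write Hive-partitioned Parquet files, one directory per value of the "part" column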
+    auto partition_schema = ::arrow::schema({::arrow::field("part", ::arrow::utf8())});
+    auto partitioning =
+        std::make_shared<::arrow::dataset::HivePartitioning>(partition_schema);
+    ::arrow::dataset::FileSystemDatasetWriteOptions write_options;
+    write_options.file_write_options = file_format_->DefaultWriteOptions();
+    write_options.filesystem = file_system_;
+    write_options.base_dir = base_path;
+    write_options.partitioning = partitioning;
+    write_options.basename_template = "part{i}.parquet";
+    ASSERT_OK(::arrow::dataset::FileSystemDataset::Write(write_options, scanner));
+
+    auto mock_fs =
+        std::dynamic_pointer_cast<::arrow::fs::internal::MockFileSystem>(file_system_);
+
+    std::vector<std::string> files = {"part=a/part0.parquet", "part=b/part0.parquet",
+                                      "part=c/part0.parquet", "part=d/part0.parquet",
+                                      "part=e/part0.parquet", "part=f/part0.parquet",
+                                      "part=g/part0.parquet", "part=h/part0.parquet",
+                                      "part=i/part0.parquet", "part=j/part0.parquet"};
+    ValidateFilesExist(mock_fs, files);
+  }
+
+  /** A utility function to validate our files were written out */
+  void ValidateFilesExist(const std::shared_ptr<arrow::fs::internal::MockFileSystem>& fs,
+                          const std::vector<std::string>& files) {
+    for (const auto& file_path : files) {
+      ASSERT_OK_AND_ASSIGN(auto result, fs->GetFileInfo(file_path));
+
+      ASSERT_NE(result.type(), arrow::fs::FileType::NotFound);
+    }
+  }
+
+  void ReadDataset() {
+    // Partitioning (the file format is reused from SetUp)
+    auto partition_schema = arrow::schema({arrow::field("part", arrow::utf8())});
+    auto partitioning =
+        std::make_shared<arrow::dataset::HivePartitioning>(partition_schema);
+
+    // Get FileInfo objects for all files under the base directory
+    arrow::fs::FileSelector selector;
+    selector.base_dir = "";
+    selector.recursive = true;
+    ASSERT_OK_AND_ASSIGN(auto files, file_system_->GetFileInfo(selector));
+
+    // Create a FileSystemDatasetFactory
+    arrow::dataset::FileSystemFactoryOptions factory_options;
+    factory_options.partitioning = partitioning;
+    ASSERT_OK_AND_ASSIGN(auto dataset_factory,
+                         arrow::dataset::FileSystemDatasetFactory::Make(
+                             file_system_, files, file_format_, factory_options));
+
+    // Create a Dataset
+    ASSERT_OK_AND_ASSIGN(auto dataset, dataset_factory->Finish());
+
+    // Create a ScannerBuilder
+    ASSERT_OK_AND_ASSIGN(auto scanner_builder, dataset->NewScan());
+
+    // Create a Scanner
+    ASSERT_OK_AND_ASSIGN(auto scanner, scanner_builder->Finish());

Review Comment:
   I finished out the test so that it both writes and reads the encrypted dataset.
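
   For reference, a minimal sketch of how the read side might be verified once the scanner is built (the hunk above is truncated, so this is an assumption rather than the code in this PR; the column name "a" comes from the key mapping, and the checks are placeholders):

   ```cpp
   // Sketch only: materialize the scan into a Table and sanity-check the
   // decrypted data. `scanner` is the one created in ReadDataset() above.
   ASSERT_OK_AND_ASSIGN(auto read_table, scanner->ToTable());
   ASSERT_NE(read_table->schema()->GetFieldByName("a"), nullptr);  // the encrypted column
   ASSERT_GT(read_table->num_rows(), 0);  // placeholder; the exact count depends on BuildTable()
   ```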


