This is an automated email from the ASF dual-hosted git repository.

michaelsmith pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit a9ad48484a739cc66caf573cc30afa298cd0b767
Author: Zoltan Borok-Nagy <[email protected]>
AuthorDate: Thu Nov 16 14:44:33 2023 +0100

    IMPALA-12557: DELETE throws DateTimeParseException when deleting from 
time-partitioned table
    
    There's a bug in IcebergDeleteSink that prevents Impala from
    successfully executing a DELETE operation on Iceberg tables. During
    DELETE we retrieve partition values from the virtual column
    ICEBERG__PARTITION__SERIALIZED. This contains the transformed values,
    e.g. in case of DAY-partitioning it contains the number of days since
    the UNIX epoch.
    
    Currently IcebergDeleteSink just uses these values as they are.
    There are two problems with this. First, we want to place the delete
    files under human-readable partition directories like other engines
    do, and like our own INSERT statement does. I.e. we want a partition
    directory /ts_day=2023-11-11/ and not /ts_day=19672/. The other problem
    is that 'IcebergUtil.partitionDataFromDataFile()' also expects the
    human-readable representation. This could be resolved at the CatalogD
    side to just accept the integer values, but then we would still need
    some logic in the IcebergDeleteSink to generate the human-readable
    values for the file paths.
    
    Moreover, partition values from INSERT statements are also
    received in the human-readable representation at the Catalog.
    
    This patch fixes the error by adding functions that transform the
    partition values to their human-readable representations. This is
    done in the IcebergDeleteSink, so the Catalog-side logic is not
    affected.
    
    The above only affects the time-based transforms (YEAR, MONTH, DAY,
    HOUR), as other partition transform values don't use different
    representations.
    
    Some notes on HOUR transform and daylight saving time:
    There is no 1:1 mapping between an offset and the human-readable
    representation in a timezone that has daylight saving time. This is not
    an issue, as Impala's TIMESTAMP type is timezone-less. This also won't
    be an issue for the TIMESTAMPTZ type as timestamp values are normalized
    to UTC when stored, and UTC doesn't have daylight saving time.
    
    Testing:
     * C++ backend tests
     * E2E tests for all time-based transforms, and also partition evolution
     * Also added an extra test about TRUNCATEing numeric values which was
       untested
    
    Change-Id: I1cfeaed6409289663eb0f65b1ee2ecebd93e6118
    Reviewed-on: http://gerrit.cloudera.org:8080/20711
    Reviewed-by: Impala Public Jenkins <[email protected]>
    Tested-by: Impala Public Jenkins <[email protected]>
---
 be/src/exec/hdfs-table-sink.cc                     |   3 +-
 be/src/exec/hdfs-table-sink.h                      |   2 +-
 be/src/exec/iceberg-delete-sink.cc                 |  55 ++-
 be/src/exec/iceberg-delete-sink.h                  |   9 +-
 be/src/exec/table-sink-base.cc                     |   2 +-
 be/src/exec/table-sink-base.h                      |   2 +-
 be/src/runtime/descriptors.cc                      |   6 +-
 be/src/runtime/descriptors.h                       |   6 +
 be/src/util/CMakeLists.txt                         |   3 +
 be/src/util/iceberg-utility-functions-test.cc      | 101 ++++++
 be/src/util/iceberg-utility-functions.cc           |  95 ++++++
 be/src/util/iceberg-utility-functions.h            |  69 ++++
 .../QueryTest/iceberg-delete-partitioned.test      | 376 +++++++++++++++++++++
 13 files changed, 703 insertions(+), 26 deletions(-)

diff --git a/be/src/exec/hdfs-table-sink.cc b/be/src/exec/hdfs-table-sink.cc
index 6a54d71d0..dd86d4faf 100644
--- a/be/src/exec/hdfs-table-sink.cc
+++ b/be/src/exec/hdfs-table-sink.cc
@@ -261,7 +261,7 @@ Status HdfsTableSink::WriteClusteredRowBatch(RuntimeState* 
state, RowBatch* batc
   return Status::OK();
 }
 
-void HdfsTableSink::ConstructPartitionInfo(
+Status HdfsTableSink::ConstructPartitionInfo(
     const TupleRow* row,
     OutputPartition* output_partition) {
   DCHECK(output_partition != nullptr);
@@ -305,6 +305,7 @@ void HdfsTableSink::ConstructPartitionInfo(
     // Use default partition spec id.
     output_partition->iceberg_spec_id = table_desc_->IcebergSpecId();
   }
+  return Status::OK();
 }
 
 inline const HdfsPartitionDescriptor* HdfsTableSink::GetPartitionDescriptor(
diff --git a/be/src/exec/hdfs-table-sink.h b/be/src/exec/hdfs-table-sink.h
index dd67d410c..7211454a6 100644
--- a/be/src/exec/hdfs-table-sink.h
+++ b/be/src/exec/hdfs-table-sink.h
@@ -136,7 +136,7 @@ class HdfsTableSink : public TableSinkBase {
   /// 'output_partition->raw_partition_names' is a vector of partition 
key-values in a
   /// non-encoded format.
   /// Staying with the above example this would hold ["a=12/31/11", "b=10"].
-  void ConstructPartitionInfo(
+  Status ConstructPartitionInfo(
       const TupleRow* row,
       OutputPartition* output_partition) override;
 
diff --git a/be/src/exec/iceberg-delete-sink.cc 
b/be/src/exec/iceberg-delete-sink.cc
index 9e6e9ec28..ca80218ca 100644
--- a/be/src/exec/iceberg-delete-sink.cc
+++ b/be/src/exec/iceberg-delete-sink.cc
@@ -32,6 +32,7 @@
 #include "util/coding-util.h"
 #include "util/debug-util.h"
 #include "util/hdfs-util.h"
+#include "util/iceberg-utility-functions.h"
 #include "util/impalad-metrics.h"
 #include "util/metrics.h"
 #include "util/runtime-profile-counters.h"
@@ -228,7 +229,18 @@ void IcebergDeleteSink::Close(RuntimeState* state) {
   closed_ = true;
 }
 
-void IcebergDeleteSink::ConstructPartitionInfo(
+std::string IcebergDeleteSink::HumanReadablePartitionValue(
+    TIcebergPartitionTransformType::type transform_type, const std::string& 
value,
+    Status* transform_result) {
+  if (!iceberg::IsTimeBasedPartition(transform_type) ||
+    value == table_desc_->null_partition_key_value()) {
+    *transform_result = Status::OK();
+    return value;
+  }
+  return iceberg::HumanReadableTime(transform_type, value, transform_result);
+}
+
+Status IcebergDeleteSink::ConstructPartitionInfo(
     const TupleRow* row,
     OutputPartition* output_partition) {
   DCHECK(output_partition != nullptr);
@@ -236,7 +248,7 @@ void IcebergDeleteSink::ConstructPartitionInfo(
 
   if (partition_key_expr_evals_.empty()) {
     output_partition->iceberg_spec_id = table_desc_->IcebergSpecId();
-    return;
+    return Status::OK();
   }
 
   DCHECK_EQ(partition_key_expr_evals_.size(), 2);
@@ -252,28 +264,28 @@ void IcebergDeleteSink::ConstructPartitionInfo(
       partitions_strings_val.len);
 
   vector<string> non_void_partition_names;
-  const vector<string>* non_void_partition_names_ptr = nullptr;
+  vector<TIcebergPartitionTransformType::type> non_void_partition_transforms;
   if (LIKELY(spec_id == table_desc_->IcebergSpecId())) {
-    // If 'spec_id' is the default spec id, then point 
'non_void_partition_names_ptr'
-    // to the already existing vector 
'table_desc_->IcebergNonVoidPartitionNames()'.
-    non_void_partition_names_ptr = 
&table_desc_->IcebergNonVoidPartitionNames();
+    // If 'spec_id' is the default spec id, then just copy the already 
populated
+    // non void partition names and transforms.
+    non_void_partition_names = table_desc_->IcebergNonVoidPartitionNames();
+    non_void_partition_transforms = 
table_desc_->IcebergNonVoidPartitionTransforms();
   } else {
-    // Otherwise collect the non-void partition names belonging to 'spec_id' in
-    // 'non_void_partition_names' and point 'non_void_partition_names_ptr' to 
it.
+    // Otherwise collect the non-void partition names belonging to 'spec_id'.
     const TIcebergPartitionSpec& partition_spec =
         table_desc_->IcebergPartitionSpecs()[spec_id];
     for (const TIcebergPartitionField& spec_field : 
partition_spec.partition_fields) {
-      if (spec_field.transform.transform_type != 
TIcebergPartitionTransformType::VOID) {
+      auto transform_type = spec_field.transform.transform_type;
+      if (transform_type != TIcebergPartitionTransformType::VOID) {
         non_void_partition_names.push_back(spec_field.field_name);
+        non_void_partition_transforms.push_back(transform_type);
       }
     }
-    non_void_partition_names_ptr = &non_void_partition_names;
   }
-  DCHECK(non_void_partition_names_ptr != nullptr);
 
-  if (non_void_partition_names_ptr->empty()) {
+  if (non_void_partition_names.empty()) {
     DCHECK(partition_values_str.empty());
-    return;
+    return Status::OK();
   }
 
   vector<string> partition_values_encoded;
@@ -288,32 +300,39 @@ void IcebergDeleteSink::ConstructPartitionInfo(
     partition_values_decoded.push_back(std::move(decoded_val));
   }
 
-  DCHECK_EQ(partition_values_decoded.size(), 
non_void_partition_names_ptr->size());
+  DCHECK_EQ(partition_values_decoded.size(), non_void_partition_names.size());
+  DCHECK_EQ(partition_values_decoded.size(), 
non_void_partition_transforms.size());
 
   stringstream url_encoded_partition_name_ss;
   stringstream external_partition_name_ss;
 
   for (int i = 0; i < partition_values_decoded.size(); ++i) {
+    auto transform_type = non_void_partition_transforms[i];
     stringstream raw_partition_key_value_ss;
     stringstream encoded_partition_key_value_ss;
 
-    raw_partition_key_value_ss << (*non_void_partition_names_ptr)[i] << "=";
-    encoded_partition_key_value_ss << (*non_void_partition_names_ptr)[i] << 
"=";
+    raw_partition_key_value_ss << non_void_partition_names[i] << "=";
+    encoded_partition_key_value_ss << non_void_partition_names[i] << "=";
 
     string& value_str = partition_values_decoded[i];
+    Status transform_status;
+    value_str = HumanReadablePartitionValue(
+        transform_type, value_str, &transform_status);
+    if (!transform_status.ok()) return transform_status;
     raw_partition_key_value_ss << value_str;
 
     string part_key_value = UrlEncodePartitionValue(value_str);
     encoded_partition_key_value_ss << part_key_value;
-    if (i < partition_key_expr_evals_.size() - 1) 
encoded_partition_key_value_ss << "/";
+    if (i < partition_values_decoded.size() - 1) 
encoded_partition_key_value_ss << "/";
 
     url_encoded_partition_name_ss << encoded_partition_key_value_ss.str();
-
     
output_partition->raw_partition_names.push_back(raw_partition_key_value_ss.str());
   }
 
   output_partition->partition_name = url_encoded_partition_name_ss.str();
   output_partition->external_partition_name = external_partition_name_ss.str();
+
+  return Status::OK();
 }
 
 string IcebergDeleteSink::DebugString() const {
diff --git a/be/src/exec/iceberg-delete-sink.h 
b/be/src/exec/iceberg-delete-sink.h
index 1a7cbc02e..7206ae3b5 100644
--- a/be/src/exec/iceberg-delete-sink.h
+++ b/be/src/exec/iceberg-delete-sink.h
@@ -74,10 +74,17 @@ class IcebergDeleteSink : public TableSinkBase {
   /// tables 'row' must contain the Iceberg virtual columns 
PARTITION__SPEC__ID and
   /// ICEBERG__PARTITION__SERIALIZED. Every information needed for 
'output_partition' can
   /// be retrieved from these fields and from the 'table_desc_'.
-  void ConstructPartitionInfo(
+  Status ConstructPartitionInfo(
       const TupleRow* row,
       OutputPartition* output_partition) override;
 
+  /// Returns the human-readable representation of a partition transform 
value. It is used
+  /// to create the file paths. IcebergUtil.partitionDataFromDataFile() also 
expects
+  /// partition values in this representation.
+  std::string HumanReadablePartitionValue(
+      TIcebergPartitionTransformType::type transform_type, const std::string& 
value,
+      Status* transform_result);
+
   /// Maps all rows in 'batch' to partitions and appends them to their 
temporary Hdfs
   /// files. The input must be ordered by the partition key expressions.
   Status WriteClusteredRowBatch(RuntimeState* state, RowBatch* batch) 
WARN_UNUSED_RESULT;
diff --git a/be/src/exec/table-sink-base.cc b/be/src/exec/table-sink-base.cc
index c3da5f04e..3ec0d3bde 100644
--- a/be/src/exec/table-sink-base.cc
+++ b/be/src/exec/table-sink-base.cc
@@ -196,7 +196,7 @@ Status TableSinkBase::InitOutputPartition(RuntimeState* 
state,
     OutputPartition* output_partition, bool empty_partition) {
   // Build the unique name for this partition from the partition keys, e.g. 
"j=1/f=foo/"
   // etc.
-  ConstructPartitionInfo(row, output_partition);
+  RETURN_IF_ERROR(ConstructPartitionInfo(row, output_partition));
 
   BuildHdfsFileNames(partition_descriptor, output_partition);
 
diff --git a/be/src/exec/table-sink-base.h b/be/src/exec/table-sink-base.h
index 3067cbe40..14ce157da 100644
--- a/be/src/exec/table-sink-base.h
+++ b/be/src/exec/table-sink-base.h
@@ -86,7 +86,7 @@ protected:
 
   virtual bool IsHiveAcid() const { return false; }
 
-  virtual void ConstructPartitionInfo(
+  virtual Status ConstructPartitionInfo(
       const TupleRow* row,
       OutputPartition* output_partition) = 0;
 
diff --git a/be/src/runtime/descriptors.cc b/be/src/runtime/descriptors.cc
index 75d77b551..4f25c0454 100644
--- a/be/src/runtime/descriptors.cc
+++ b/be/src/runtime/descriptors.cc
@@ -262,10 +262,10 @@ HdfsTableDescriptor::HdfsTableDescriptor(const 
TTableDescriptor& tdesc, ObjectPo
     const TIcebergPartitionSpec& spec = 
iceberg_partition_specs_[iceberg_spec_id_];
     DCHECK_EQ(spec.spec_id, iceberg_spec_id_);
     for (const TIcebergPartitionField& spec_field : spec.partition_fields) {
-      if (spec_field.transform.transform_type == 
TIcebergPartitionTransformType::VOID) {
-        continue;
-      }
+      auto transform_type = spec_field.transform.transform_type;
+      if (transform_type == TIcebergPartitionTransformType::VOID) continue;
       iceberg_non_void_partition_names_.push_back(spec_field.field_name);
+      iceberg_non_void_partition_transforms_.push_back(transform_type);
     }
     iceberg_parquet_compression_codec_ = 
tdesc.icebergTable.parquet_compression_codec;
     iceberg_parquet_row_group_size_ = 
tdesc.icebergTable.parquet_row_group_size;
diff --git a/be/src/runtime/descriptors.h b/be/src/runtime/descriptors.h
index a1d7560d9..5b7ae6381 100644
--- a/be/src/runtime/descriptors.h
+++ b/be/src/runtime/descriptors.h
@@ -449,6 +449,10 @@ class HdfsTableDescriptor : public TableDescriptor {
   const std::vector<std::string>& IcebergNonVoidPartitionNames() const {
     return iceberg_non_void_partition_names_;
   }
+  const std::vector<TIcebergPartitionTransformType::type>&
+      IcebergNonVoidPartitionTransforms() const {
+    return iceberg_non_void_partition_transforms_;
+  }
   const TCompressionCodec& IcebergParquetCompressionCodec() const {
     return iceberg_parquet_compression_codec_;
   }
@@ -484,6 +488,8 @@ class HdfsTableDescriptor : public TableDescriptor {
   std::string iceberg_table_location_;
   std::vector<TIcebergPartitionSpec> iceberg_partition_specs_;
   std::vector<std::string> iceberg_non_void_partition_names_;
+  std::vector<TIcebergPartitionTransformType::type>
+      iceberg_non_void_partition_transforms_;
   TCompressionCodec iceberg_parquet_compression_codec_;
   int64_t iceberg_parquet_row_group_size_;
   int64_t iceberg_parquet_plain_page_size_;
diff --git a/be/src/util/CMakeLists.txt b/be/src/util/CMakeLists.txt
index 228be3d18..3dbbbbdea 100644
--- a/be/src/util/CMakeLists.txt
+++ b/be/src/util/CMakeLists.txt
@@ -65,6 +65,7 @@ set(UTIL_SRCS
   hdfs-bulk-ops.cc
   hdr-histogram.cc
   histogram-metric.cc
+  iceberg-utility-functions.cc
   impalad-metrics.cc
   impala-bloom-filter-buffer-allocator.cc
   in-list-filter.cc
@@ -183,6 +184,7 @@ add_library(UtilTests STATIC
   fixed-size-hash-table-test.cc
   hdfs-util-test.cc
   hdr-histogram-test.cc
+  iceberg-utility-functions-test.cc
   in-list-filter-test.cc
   jwt-util-test.cc
   logging-support-test.cc
@@ -236,6 +238,7 @@ ADD_UNIFIED_BE_LSAN_TEST(filesystem-util-test 
"FilesystemUtil.*")
 ADD_UNIFIED_BE_LSAN_TEST(fixed-size-hash-table-test "FixedSizeHash.*")
 ADD_UNIFIED_BE_LSAN_TEST(hdfs-util-test HdfsUtilTest.*)
 ADD_UNIFIED_BE_LSAN_TEST(hdr-histogram-test HdrHistogramTest.*)
+ADD_UNIFIED_BE_LSAN_TEST(iceberg-utility-functions-test "IcebergPartitions.*")
 # internal-queue-test has a non-standard main(), so it needs a small amount of 
thought
 # to use a unified executable
 ADD_BE_LSAN_TEST(internal-queue-test)
diff --git a/be/src/util/iceberg-utility-functions-test.cc 
b/be/src/util/iceberg-utility-functions-test.cc
new file mode 100644
index 000000000..90c324700
--- /dev/null
+++ b/be/src/util/iceberg-utility-functions-test.cc
@@ -0,0 +1,101 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "util/iceberg-utility-functions.h"
+
+#include "testutil/gtest-util.h"
+
+#include "common/names.h"
+
+namespace impala {
+namespace iceberg {
+
+TEST(IcebergPartitions, HumanReadableYearTest) {
+  EXPECT_EQ("1969", HumanReadableYear(-1));
+  EXPECT_EQ("1970", HumanReadableYear(0));
+  EXPECT_EQ("1971", HumanReadableYear(1));
+  EXPECT_EQ("2000", HumanReadableYear(30));
+  EXPECT_EQ("2023", HumanReadableYear(53));
+}
+
+TEST(IcebergPartitions, HumanReadableMonthTest) {
+  EXPECT_EQ("1969-01", HumanReadableMonth(-12));
+  EXPECT_EQ("1969-12", HumanReadableMonth(-1));
+  EXPECT_EQ("1970-01", HumanReadableMonth(0));
+  EXPECT_EQ("2000-01", HumanReadableMonth(360));
+  EXPECT_EQ("2023-11", HumanReadableMonth(360+23*12+10));
+}
+
+TEST(IcebergPartitions, HumanReadableDayTest) {
+  // Used https://www.timeanddate.com/ to verify the followings:
+  EXPECT_EQ("1934-02-28", HumanReadableDay(-13091));
+  EXPECT_EQ("1969-01-01", HumanReadableDay(-365));
+  EXPECT_EQ("1969-12-30", HumanReadableDay(-2));
+  EXPECT_EQ("1969-12-31", HumanReadableDay(-1));
+  EXPECT_EQ("1970-01-01", HumanReadableDay(0));
+  EXPECT_EQ("1970-07-24", HumanReadableDay(204));
+  EXPECT_EQ("1987-10-06", HumanReadableDay(6487));
+  EXPECT_EQ("1991-01-23", HumanReadableDay(7692));
+  EXPECT_EQ("1994-05-06", HumanReadableDay(8891));
+  EXPECT_EQ("2000-09-19", HumanReadableDay(11219));
+  EXPECT_EQ("2023-11-11", HumanReadableDay(19672));
+}
+
+TEST(IcebergPartitions, HumanReadableHourTest) {
+  // Used https://www.timeanddate.com/ to verify the followings:
+  EXPECT_EQ("1934-02-28-10", HumanReadableHour(-314174));
+  EXPECT_EQ("1969-01-01-01", HumanReadableHour(-365*24+1));
+  EXPECT_EQ("1969-12-30-23", HumanReadableHour(-25));
+  EXPECT_EQ("1969-12-31-23", HumanReadableHour(-1));
+  EXPECT_EQ("1970-01-01-00", HumanReadableHour(0));
+  EXPECT_EQ("1970-01-01-01", HumanReadableHour(1));
+  EXPECT_EQ("1970-01-01-02", HumanReadableHour(2));
+  EXPECT_EQ("1970-07-24-13", HumanReadableHour(4909));
+  EXPECT_EQ("1971-01-01-00", HumanReadableHour(365*24));
+  EXPECT_EQ("1987-10-06-18", HumanReadableHour(155706));
+  EXPECT_EQ("1991-01-23-08", HumanReadableHour(184616));
+  EXPECT_EQ("1994-05-06-11", HumanReadableHour(213395));
+  EXPECT_EQ("2000-09-19-13", HumanReadableHour(269269));
+  EXPECT_EQ("2023-11-11-22", HumanReadableHour(472150));
+}
+
+TEST(IcebergPartitions, HumanReadableTimeTest) {
+  Status st;
+  EXPECT_EQ("1967", HumanReadableTime(
+      TIcebergPartitionTransformType::YEAR, "-3", &st));
+  EXPECT_OK(st);
+  EXPECT_EQ("", HumanReadableTime(
+      TIcebergPartitionTransformType::YEAR, "invalid", &st));
+  EXPECT_ERROR(st, TErrorCode::GENERAL);
+  EXPECT_EQ("Failed to parse time partition value 'invalid' as int.\n", 
st.GetDetail());
+  EXPECT_EQ("1977", HumanReadableTime(
+      TIcebergPartitionTransformType::YEAR, "7", &st));
+  EXPECT_OK(st);
+  // 1970-01 + 7 months is 1970-08
+  EXPECT_EQ("1970-08", HumanReadableTime(
+      TIcebergPartitionTransformType::MONTH, "7", &st));
+  EXPECT_OK(st);
+  EXPECT_EQ("1970-01-08", HumanReadableTime(
+      TIcebergPartitionTransformType::DAY, "7", &st));
+  EXPECT_OK(st);
+  EXPECT_EQ("1970-01-01-07", HumanReadableTime(
+      TIcebergPartitionTransformType::HOUR, "7", &st));
+  EXPECT_OK(st);
+}
+
+}
+}
diff --git a/be/src/util/iceberg-utility-functions.cc 
b/be/src/util/iceberg-utility-functions.cc
new file mode 100644
index 000000000..89a626df9
--- /dev/null
+++ b/be/src/util/iceberg-utility-functions.cc
@@ -0,0 +1,95 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "util/iceberg-utility-functions.h"
+
+#include "cctz/civil_time.h"
+#include "cctz/time_zone.h"
+
+#include "common/status.h"
+#include "gen-cpp/CatalogObjects_types.h"
+#include "gutil/strings/substitute.h"
+#include "util/string-parser.h"
+
+#include "common/names.h"
+
+namespace impala {
+namespace iceberg {
+
+static constexpr int32_t ICEBERG_EPOCH_YEAR = 1970;
+
+bool IsTimeBasedPartition(TIcebergPartitionTransformType::type transform_type) 
{
+  if (transform_type == TIcebergPartitionTransformType::YEAR ||
+      transform_type == TIcebergPartitionTransformType::MONTH ||
+      transform_type == TIcebergPartitionTransformType::DAY ||
+      transform_type == TIcebergPartitionTransformType::HOUR) {
+    return true;
+  }
+  return false;
+}
+
+template <typename T>
+string GetFormattedTimePoint(int32_t offset, const string& format) {
+  static const T epoch;
+  static const cctz::time_zone utc;
+  T datetime = epoch + offset;
+  auto time_point = cctz::convert(datetime, utc);
+  return cctz::format(format, time_point, utc);
+}
+
+string HumanReadableYear(int32_t part_value) {
+  return std::to_string(part_value + ICEBERG_EPOCH_YEAR);
+}
+
+string HumanReadableMonth(int32_t part_value) {
+  return GetFormattedTimePoint<cctz::civil_month>(part_value, "%Y-%m");
+}
+
+string HumanReadableDay(int32_t part_value) {
+  return GetFormattedTimePoint<cctz::civil_day>(part_value, "%Y-%m-%d");
+}
+
+string HumanReadableHour(int32_t part_value) {
+  return GetFormattedTimePoint<cctz::civil_hour>(part_value, "%Y-%m-%d-%H");
+}
+
+string HumanReadableTime(TIcebergPartitionTransformType::type transform_type,
+    const string& part_value, Status* status) {
+  DCHECK(status != nullptr);
+  StringParser::ParseResult parse_result;
+  int32_t int_part_value = StringParser::StringToInt<int32_t>(
+      part_value.c_str(), part_value.size(), &parse_result);
+  if (parse_result != StringParser::ParseResult::PARSE_SUCCESS) {
+    *status = Status(Substitute("Failed to parse time partition value '$0' as 
int.",
+        part_value));
+    return "";
+  }
+  *status = Status::OK();
+  switch (transform_type) {
+    case TIcebergPartitionTransformType::YEAR:  return 
HumanReadableYear(int_part_value);
+    case TIcebergPartitionTransformType::MONTH: return 
HumanReadableMonth(int_part_value);
+    case TIcebergPartitionTransformType::DAY:   return 
HumanReadableDay(int_part_value);
+    case TIcebergPartitionTransformType::HOUR:  return 
HumanReadableHour(int_part_value);
+    default:
+        DCHECK(false);
+        *status = Status(Substitute("Unknown transform type: $0", 
transform_type));
+  }
+  return "";
+}
+
+}
+}
diff --git a/be/src/util/iceberg-utility-functions.h 
b/be/src/util/iceberg-utility-functions.h
new file mode 100644
index 000000000..f5dad3a37
--- /dev/null
+++ b/be/src/util/iceberg-utility-functions.h
@@ -0,0 +1,69 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#pragma once
+
+#include "gen-cpp/CatalogObjects_types.h"
+
+namespace impala {
+
+class Status;
+
+namespace iceberg {
+
+/// Returns True if 'transform_type' is transformation based on a time value.
+/// I.e. YEAR, MONTH, DAY, HOUR
+bool IsTimeBasedPartition(TIcebergPartitionTransformType::type transform_type);
+
+/// Returns a human readable year.
+/// Based on Iceberg's TransformUtil.humanYear()
+/// E.g.:
+/// 0  => 1970
+/// 53 => 2023
+std::string HumanReadableYear(int32_t part_value);
+
+/// Returns a human readable month.
+/// Based on Iceberg's TransformUtil.humanMonth()
+/// E.g.:
+/// 0  => 1970-01
+/// 36 => 1973-01
+std::string HumanReadableMonth(int32_t part_value);
+
+/// Returns a human readable day.
+/// Based on Iceberg's TransformUtil.humanDay()
+/// E.g.:
+/// 0  => 1970-01-01
+/// 10 => 1970-01-11
+std::string HumanReadableDay(int32_t part_value);
+
+/// Returns a human readable hour.
+/// Based on Iceberg's TransformUtil.humanHour()
+/// E.g.:
+/// 0  => 1970-01-01-00
+/// 10 => 1970-01-01-10
+std::string HumanReadableHour(int32_t part_value);
+
+/// @param transform_type is the Iceberg partition transform (YEAR, MONTH, 
etc.).
+/// @param part_value is the transformed value (offset since unix epoch).
+/// @param transform_result set to ERROR in case of failure.
+/// @return Returns a human readable time value.
+std::string HumanReadableTime(
+    TIcebergPartitionTransformType::type transform_type, const std::string& 
part_value,
+    Status* transform_result);
+
+}
+}
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-delete-partitioned.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-delete-partitioned.test
index 9c3da8866..7d4b999b9 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-delete-partitioned.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-delete-partitioned.test
@@ -407,3 +407,379 @@ SELECT * FROM ice_store_sales where ss_customer_sk % 10 = 0;
 ---- TYPES
 INT, BIGINT, INT, INT, INT, INT, INT, INT, BIGINT, INT, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, DECIMAL, INT
 ====
+---- QUERY
+create table date_year_part (i int, d date)
+partitioned by spec (year(d))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into date_year_part values (1, '1968-01-01 01:02:03'),
+  (2, '1969-12-31 23:59:00'), (3, '1970-01-01 00:00:00'),
+  (4, '2023-11-15 15:31:00');
+show files in date_year_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1968/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1969/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1970/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=2023/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1968/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1969/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1970/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=2023/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from date_year_part where i > 0;
+show files in date_year_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1968/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1969/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1970/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=2023/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1968/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1969/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=1970/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_year_part/data/d_year=2023/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from date_year_part;
+---- RESULTS
+---- TYPES
+INT, DATE
+====
+---- QUERY
+create table date_month_part (i int, d date)
+partitioned by spec (month(d))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into date_month_part values (1, '1968-01-01'),
+  (2, '1969-12-31'), (3, '1970-01-01'), (4, '2023-11-15');
+show files in date_month_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1968-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1969-12/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1970-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=2023-11/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1968-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1969-12/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1970-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=2023-11/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from date_month_part where i > 0;
+show files in date_month_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1968-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1969-12/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1970-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=2023-11/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1968-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1969-12/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=1970-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_month_part/data/d_month=2023-11/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from date_month_part;
+---- RESULTS
+---- TYPES
+INT, DATE
+====
+---- QUERY
+create table date_day_part (i int, d date)
+partitioned by spec (day(d))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into date_day_part values (1, '1968-01-01'),
+  (2, '1969-12-31'), (3, '1970-01-01'), (4, '2023-11-15');
+show files in date_day_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1968-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1969-12-31/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1970-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=2023-11-15/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1968-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1969-12-31/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1970-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=2023-11-15/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from date_day_part where i > 0;
+show files in date_day_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1968-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1969-12-31/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1970-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=2023-11-15/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1968-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1969-12-31/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=1970-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/date_day_part/data/d_day=2023-11-15/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from date_day_part;
+---- RESULTS
+---- TYPES
+INT, DATE
+====
+---- QUERY
+create table ts_year_part (i int, ts timestamp)
+partitioned by spec (year(ts))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into ts_year_part values (1, '1968-01-01'),
+  (2, '1969-12-31'), (3, '1970-01-01'), (4, '2023-11-15');
+show files in ts_year_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1968/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1969/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1970/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=2023/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1968/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1969/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1970/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=2023/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from ts_year_part where i > 0;
+show files in ts_year_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1968/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1969/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1970/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=2023/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1968/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1969/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=1970/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_year_part/data/ts_year=2023/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from ts_year_part;
+---- RESULTS
+---- TYPES
+INT, TIMESTAMP
+====
+---- QUERY
+create table ts_month_part (i int, ts timestamp)
+partitioned by spec (month(ts))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into ts_month_part values (1, '1968-01-01 01:02:03'),
+  (2, '1969-12-31 23:59:00'), (3, '1970-01-01 00:00:00'),
+  (4, '2023-11-15 15:31:00');
+show files in ts_month_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1968-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1969-12/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1970-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=2023-11/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1968-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1969-12/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1970-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=2023-11/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from ts_month_part where i > 0;
+show files in ts_month_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1968-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1969-12/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1970-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=2023-11/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1968-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1969-12/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=1970-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_month_part/data/ts_month=2023-11/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from ts_month_part;
+---- RESULTS
+---- TYPES
+INT, TIMESTAMP
+====
+---- QUERY
+create table ts_day_part (i int, ts timestamp)
+partitioned by spec (day(ts))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into ts_day_part values (1, '1968-01-01 01:02:03'),
+  (2, '1969-12-31 23:59:00'), (3, '1970-01-01 00:00:00'),
+  (4, '2023-11-15 15:31:00');
+show files in ts_day_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1968-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1969-12-31/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1970-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=2023-11-15/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1968-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1969-12-31/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1970-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=2023-11-15/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from ts_day_part where i > 0;
+show files in ts_day_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1968-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1969-12-31/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1970-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=2023-11-15/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1968-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1969-12-31/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=1970-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_day_part/data/ts_day=2023-11-15/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from ts_day_part;
+---- RESULTS
+---- TYPES
+INT, TIMESTAMP
+====
+---- QUERY
+create table ts_hour_part (i int, ts timestamp)
+partitioned by spec (hour(ts))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into ts_hour_part values (1, '1968-01-01 01:02:03'),
+  (2, '1969-12-31 23:59:00'), (3, '1970-01-01 00:00:00'),
+  (4, '2023-11-15 15:31:00');
+show files in ts_hour_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1968-01-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1969-12-31-23/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1970-01-01-00/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=2023-11-15-15/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1968-01-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1969-12-31-23/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1970-01-01-00/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=2023-11-15-15/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from ts_hour_part where i > 0;
+show files in ts_hour_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1968-01-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1969-12-31-23/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1970-01-01-00/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=2023-11-15-15/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1968-01-01-01/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1969-12-31-23/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=1970-01-01-00/delete-.*parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_hour_part/data/ts_hour=2023-11-15-15/delete-.*parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from ts_hour_part;
+---- RESULTS
+---- TYPES
+INT, TIMESTAMP
+====
+---- QUERY
+create table ts_evolve_part (i int, ts timestamp)
+partitioned by spec (year(ts))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into ts_evolve_part values (1, '1968-01-01 01:02:03');
+alter table ts_evolve_part set partition spec (month(ts));
+insert into ts_evolve_part values (2, '1969-12-31 23:59:00');
+insert into ts_evolve_part values (111, 'invalid');
+alter table ts_evolve_part set partition spec (day(ts));
+insert into ts_evolve_part values (3, '1970-01-01 00:00:00');
+alter table ts_evolve_part set partition spec (hour(ts));
+insert into ts_evolve_part values (4, '2023-11-15 15:31:00');
+show files in ts_evolve_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_year=1968/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=1969-12/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=__HIVE_DEFAULT_PARTITION__/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_day=1970-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_hour=2023-11-15-15/(?!delete-).*.parq','.*B','','.*'
+---- RESULTS: VERIFY_IS_NOT_IN
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_year=1968/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=1969-12/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=__HIVE_DEFAULT_PARTITION__/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_day=1970-01-01/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_hour=2023-11-15-15/delete-.*.parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from ts_evolve_part where i > 0;
+show files in ts_evolve_part;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_year=1968/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=1969-12/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=__HIVE_DEFAULT_PARTITION__/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_day=1970-01-01/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_hour=2023-11-15-15/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_year=1968/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=1969-12/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_month=__HIVE_DEFAULT_PARTITION__/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_day=1970-01-01/delete-.*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/ts_evolve_part/data/ts_hour=2023-11-15-15/delete-.*.parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+select * from ts_evolve_part;
+---- RESULTS
+---- TYPES
+INT, TIMESTAMP
+====
+---- QUERY
+create table numeric_truncate (id int, int_col int, bigint_col bigint, dec_8_0 decimal(8, 0), dec_10_2 decimal(10, 2))
+partitioned by spec (truncate(10, int_col), truncate(1000, bigint_col), void(id), truncate(20, dec_8_0), truncate(50, dec_10_2))
+stored by iceberg
+tblproperties ('format-version'='2');
+insert into numeric_truncate values (1, 12, 1222, 135, 20.75);
+show files in numeric_truncate;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/numeric_truncate/data/int_col_trunc=10/bigint_col_trunc=1000/dec_8_0_trunc=120/dec_10_2_trunc=20.50/(?!delete-).*.parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+delete from numeric_truncate where id = 1;
+---- DML_RESULTS: numeric_truncate
+---- TYPES
+INT,INT,BIGINT,DECIMAL,DECIMAL
+====
+---- QUERY
+show files in numeric_truncate;
+---- RESULTS: VERIFY_IS_SUBSET
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/numeric_truncate/data/int_col_trunc=10/bigint_col_trunc=1000/dec_8_0_trunc=120/dec_10_2_trunc=20.50/(?!delete-).*.parq','.*B','','.*'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/numeric_truncate/data/int_col_trunc=10/bigint_col_trunc=1000/dec_8_0_trunc=120/dec_10_2_trunc=20.50/delete-.*.parq','.*B','','.*'
+---- TYPES
+STRING, STRING, STRING, STRING
+====

Reply via email to