This is an automated email from the ASF dual-hosted git repository.
pvary pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 1bdac5106e HIVE-26200: Add tests for Iceberg DELETE statements for every supported type (Peter Vary reviewed by Laszlo Pinter) (#3268)
1bdac5106e is described below
commit 1bdac5106ea623b5799a60df5de16ffb08a70698
Author: pvary <[email protected]>
AuthorDate: Fri May 6 08:47:47 2022 +0200
HIVE-26200: Add tests for Iceberg DELETE statements for every supported type (Peter Vary reviewed by Laszlo Pinter) (#3268)
---
.../iceberg/mr/hive/TestHiveIcebergInserts.java | 5 +++-
.../apache/iceberg/mr/hive/TestHiveIcebergV2.java | 29 ++++++++++++++++++++++
2 files changed, 33 insertions(+), 1 deletion(-)
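
For context on the patch below: the new testDeleteForSupportedTypes case builds a one-column Iceberg v2 table per supported primitive type, loads a generated record, issues a plain DELETE, and checks the table is empty. The snippet that follows is a minimal standalone sketch of the per-type naming and schema construction that the test relies on; the class name DeletePerTypeSketch and the reduced SUPPORTED_TYPES list are illustrative stand-ins (the real list lives in the Hive Iceberg test utilities), and the printed DELETE string corresponds to what the test hands to shell.executeStatement against a table created by testTables.createTable.

import java.util.List;

import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;

import static org.apache.iceberg.types.Types.NestedField.required;

public class DeletePerTypeSketch {

  // Reduced stand-in for the SUPPORTED_TYPES list used by the Hive Iceberg tests.
  private static final List<Type> SUPPORTED_TYPES = List.of(
      Types.BooleanType.get(),
      Types.IntegerType.get(),
      Types.LongType.get(),
      Types.StringType.get(),
      Types.TimestampType.withZone());

  public static void main(String[] args) {
    for (int i = 0; i < SUPPORTED_TYPES.size(); i++) {
      Type type = SUPPORTED_TYPES.get(i);

      // Same naming convention as the new test: one table and one column per type.
      String tableName = type.typeId().toString().toLowerCase() + "_table_" + i;
      String columnName = type.typeId().toString().toLowerCase() + "_column";

      // Single-column schema holding only the type under test.
      Schema schema = new Schema(required(1, columnName, type));

      // In the real test this statement goes through shell.executeStatement(...)
      // against a v2 table created with testTables.createTable(...).
      System.out.println("DELETE FROM " + tableName + "   -- " + schema.asStruct());
    }
  }
}
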
diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergInserts.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergInserts.java
index f38eea1969..7e3c72bf31 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergInserts.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergInserts.java
@@ -70,14 +70,17 @@ public class TestHiveIcebergInserts extends HiveIcebergStorageHandlerWithEngineB
public void testInsertSupportedTypes() throws IOException {
for (int i = 0; i < SUPPORTED_TYPES.size(); i++) {
Type type = SUPPORTED_TYPES.get(i);
+
// TODO: remove this filter when issue #1881 is resolved
if (type == Types.UUIDType.get() && fileFormat == FileFormat.PARQUET) {
continue;
}
+
// TODO: remove this filter when we figure out how we could test binary types
- if (type.equals(Types.BinaryType.get()) || type.equals(Types.FixedType.ofLength(5))) {
+ if (type == Types.BinaryType.get() || type.equals(Types.FixedType.ofLength(5))) {
continue;
}
+
String columnName = type.typeId().toString().toLowerCase() + "_column";
Schema schema = new Schema(required(1, "id", Types.LongType.get()), required(2, columnName, type));
diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergV2.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergV2.java
index 569a9d3fc3..1c9d3e1922 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergV2.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergV2.java
@@ -34,12 +34,14 @@ import org.apache.iceberg.deletes.PositionDelete;
import org.apache.iceberg.mr.TestHelper;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import static org.apache.iceberg.types.Types.NestedField.optional;
+import static org.apache.iceberg.types.Types.NestedField.required;
/**
* Tests Format V2 specific features, such as reading/writing V2 tables, using delete files, etc.
@@ -354,6 +356,33 @@ public class TestHiveIcebergV2 extends HiveIcebergStorageHandlerWithEngineBase {
HiveIcebergTestUtils.validateData(expected, HiveIcebergTestUtils.valueForRow(newSchema, objects), 0);
}
+ @Test
+ public void testDeleteForSupportedTypes() throws IOException {
+ for (int i = 0; i < SUPPORTED_TYPES.size(); i++) {
+ Type type = SUPPORTED_TYPES.get(i);
+
+ // TODO: remove this filter when issue #1881 is resolved
+ if (type == Types.UUIDType.get() && fileFormat == FileFormat.PARQUET) {
+ continue;
+ }
+
+ // TODO: remove this filter when we figure out how we could test binary types
+ if (type == Types.BinaryType.get() || type.equals(Types.FixedType.ofLength(5))) {
+ continue;
+ }
+
+ String tableName = type.typeId().toString().toLowerCase() + "_table_" + i;
+ String columnName = type.typeId().toString().toLowerCase() + "_column";
+
+ Schema schema = new Schema(required(1, columnName, type));
+ List<Record> records = TestHelper.generateRandomRecords(schema, 1, 0L);
+ Table table = testTables.createTable(shell, tableName, schema, fileFormat, records, 2);
+
+ shell.executeStatement("DELETE FROM " + tableName);
+ HiveIcebergTestUtils.validateData(table, ImmutableList.of(), 0);
+ }
+ }
+
private static <T> PositionDelete<T> positionDelete(CharSequence path, long pos, T row) {
PositionDelete<T> positionDelete = PositionDelete.create();
return positionDelete.set(path, pos, row);