This is an automated email from the ASF dual-hosted git repository.

blue pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/master by this push:
     new ec69a25  Tests: Add unit tests for InternalRecordWrapper, 
RowDataWrapper, InternalRowWrapper (#2683)
ec69a25 is described below

commit ec69a25da24f38419107bfa264c582d16331c7e9
Author: openinx <[email protected]>
AuthorDate: Sat Jun 19 01:00:29 2021 +0800

    Tests: Add unit tests for InternalRecordWrapper, RowDataWrapper, 
InternalRowWrapper (#2683)
---
 .../apache/iceberg/data/InternalRecordWrapper.java |   8 +-
 .../java/org/apache/iceberg/RecordWrapperTest.java | 104 +++++++++++++++++++++
 .../apache/iceberg/flink/TestRowDataWrapper.java   |  89 ++++++++++++++++++
 .../spark/source/TestInternalRowWrapper.java       |  78 ++++++++++++++++
 4 files changed, 278 insertions(+), 1 deletion(-)

diff --git 
a/data/src/main/java/org/apache/iceberg/data/InternalRecordWrapper.java 
b/data/src/main/java/org/apache/iceberg/data/InternalRecordWrapper.java
index a2d0624..4aea96d 100644
--- a/data/src/main/java/org/apache/iceberg/data/InternalRecordWrapper.java
+++ b/data/src/main/java/org/apache/iceberg/data/InternalRecordWrapper.java
@@ -81,7 +81,13 @@ public class InternalRecordWrapper implements StructLike {
   @Override
   public <T> T get(int pos, Class<T> javaClass) {
     if (transforms[pos] != null) {
-      return javaClass.cast(transforms[pos].apply(wrapped.get(pos, 
Object.class)));
+      Object value = wrapped.get(pos, Object.class);
+      if (value == null) {
+        // transform functions cannot handle null values, so just return null here.
+        return null;
+      } else {
+        return javaClass.cast(transforms[pos].apply(value));
+      }
     }
     return wrapped.get(pos, javaClass);
   }
diff --git a/data/src/test/java/org/apache/iceberg/RecordWrapperTest.java 
b/data/src/test/java/org/apache/iceberg/RecordWrapperTest.java
new file mode 100644
index 0000000..0681472
--- /dev/null
+++ b/data/src/test/java/org/apache/iceberg/RecordWrapperTest.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg;
+
+import org.apache.iceberg.types.Types;
+import org.apache.iceberg.util.StructLikeWrapper;
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.apache.iceberg.types.Types.NestedField.optional;
+import static org.apache.iceberg.types.Types.NestedField.required;
+
+public abstract class RecordWrapperTest {
+
+  private static final Types.StructType PRIMITIVE_WITHOUT_TIME = 
Types.StructType.of(
+      required(100, "id", Types.LongType.get()),
+      optional(101, "data", Types.StringType.get()),
+      required(102, "b", Types.BooleanType.get()),
+      optional(103, "i", Types.IntegerType.get()),
+      required(104, "l", Types.LongType.get()),
+      optional(105, "f", Types.FloatType.get()),
+      required(106, "d", Types.DoubleType.get()),
+      optional(107, "date", Types.DateType.get()),
+      required(108, "ts_tz", Types.TimestampType.withZone()),
+      required(110, "s", Types.StringType.get()),
+      required(112, "fixed", Types.FixedType.ofLength(7)),
+      optional(113, "bytes", Types.BinaryType.get()),
+      required(114, "dec_9_0", Types.DecimalType.of(9, 0)),
+      required(115, "dec_11_2", Types.DecimalType.of(11, 2)),
+      required(116, "dec_38_10", Types.DecimalType.of(38, 10))// maximum 
precision
+  );
+
+  private static final Types.StructType TIMESTAMP_WITHOUT_ZONE = 
Types.StructType.of(
+      required(101, "ts0", Types.TimestampType.withoutZone()),
+      required(102, "ts1", Types.TimestampType.withoutZone())
+  );
+
+  protected static final Types.StructType TIME = Types.StructType.of(
+      required(100, "time0", Types.TimeType.get()),
+      optional(101, "time1", Types.TimeType.get())
+  );
+
+  @Test
+  public void testSimpleStructWithoutTime() {
+    generateAndValidate(new Schema(PRIMITIVE_WITHOUT_TIME.fields()));
+  }
+
+  @Test
+  public void testTimestampWithoutZone() {
+    generateAndValidate(new Schema(TIMESTAMP_WITHOUT_ZONE.fields()));
+  }
+
+  @Test
+  public void testTime() {
+    generateAndValidate(new Schema(TIME.fields()));
+  }
+
+  @Test
+  public void testNestedSchema() {
+    Types.StructType structType = Types.StructType.of(
+        required(0, "id", Types.LongType.get()),
+        required(1, "level1", Types.StructType.of(
+            optional(2, "level2", Types.StructType.of(
+                required(3, "level3", Types.StructType.of(
+                    optional(4, "level4", Types.StructType.of(
+                        required(5, "level5", Types.StructType.of(
+                            PRIMITIVE_WITHOUT_TIME.fields()
+                        ))
+                    ))
+                ))
+            ))
+        ))
+    );
+
+    generateAndValidate(new Schema(structType.fields()));
+  }
+
+  private void generateAndValidate(Schema schema) {
+    generateAndValidate(schema, Assert::assertEquals);
+  }
+
+  public interface AssertMethod {
+    void assertEquals(String message, StructLikeWrapper expected, 
StructLikeWrapper actual);
+  }
+
+  protected abstract void generateAndValidate(Schema schema, AssertMethod 
assertMethod);
+}
diff --git 
a/flink/src/test/java/org/apache/iceberg/flink/TestRowDataWrapper.java 
b/flink/src/test/java/org/apache/iceberg/flink/TestRowDataWrapper.java
new file mode 100644
index 0000000..9012fc5
--- /dev/null
+++ b/flink/src/test/java/org/apache/iceberg/flink/TestRowDataWrapper.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.flink;
+
+import java.util.Iterator;
+import org.apache.flink.table.data.RowData;
+import org.apache.iceberg.RecordWrapperTest;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.StructLike;
+import org.apache.iceberg.data.InternalRecordWrapper;
+import org.apache.iceberg.data.RandomGenericData;
+import org.apache.iceberg.data.Record;
+import org.apache.iceberg.flink.data.RandomRowData;
+import org.apache.iceberg.util.StructLikeWrapper;
+import org.junit.Assert;
+
+public class TestRowDataWrapper extends RecordWrapperTest {
+
+  /**
+   * Flink's time type has been truncated to milliseconds, so we need a customized assert method to check the
+   * values.
+   */
+  @Override
+  public void testTime() {
+    generateAndValidate(new Schema(TIME.fields()), (message, expectedWrapper, 
actualWrapper) -> {
+      for (int pos = 0; pos < TIME.fields().size(); pos++) {
+        Object expected = expectedWrapper.get().get(pos, Object.class);
+        Object actual = actualWrapper.get().get(pos, Object.class);
+        if (expected == actual) {
+          return;
+        }
+
+        if (expected == null || actual == null) {
+          Assert.fail(String.format("The expected value is %s but actual value 
is %s", expected, actual));
+        }
+
+        int expectedMilliseconds = (int) ((long) expected / 1000_000);
+        int actualMilliseconds = (int) ((long) actual / 1000_000);
+        Assert.assertEquals(message, expectedMilliseconds, actualMilliseconds);
+      }
+    });
+  }
+
+  @Override
+  protected void generateAndValidate(Schema schema, 
RecordWrapperTest.AssertMethod assertMethod) {
+    int numRecords = 100;
+    Iterable<Record> recordList = RandomGenericData.generate(schema, 
numRecords, 101L);
+    Iterable<RowData> rowDataList = RandomRowData.generate(schema, numRecords, 
101L);
+
+    InternalRecordWrapper recordWrapper = new 
InternalRecordWrapper(schema.asStruct());
+    RowDataWrapper rowDataWrapper = new 
RowDataWrapper(FlinkSchemaUtil.convert(schema), schema.asStruct());
+
+    Iterator<Record> actual = recordList.iterator();
+    Iterator<RowData> expected = rowDataList.iterator();
+
+    StructLikeWrapper actualWrapper = 
StructLikeWrapper.forType(schema.asStruct());
+    StructLikeWrapper expectedWrapper = 
StructLikeWrapper.forType(schema.asStruct());
+    for (int i = 0; i < numRecords; i++) {
+      Assert.assertTrue("Should have more records", actual.hasNext());
+      Assert.assertTrue("Should have more RowData", expected.hasNext());
+
+      StructLike recordStructLike = recordWrapper.wrap(actual.next());
+      StructLike rowDataStructLike = rowDataWrapper.wrap(expected.next());
+
+      assertMethod.assertEquals("Should have expected StructLike values",
+          actualWrapper.set(recordStructLike), 
expectedWrapper.set(rowDataStructLike));
+    }
+
+    Assert.assertFalse("Shouldn't have more record", actual.hasNext());
+    Assert.assertFalse("Shouldn't have more RowData", expected.hasNext());
+  }
+}
diff --git 
a/spark/src/test/java/org/apache/iceberg/spark/source/TestInternalRowWrapper.java
 
b/spark/src/test/java/org/apache/iceberg/spark/source/TestInternalRowWrapper.java
new file mode 100644
index 0000000..4ab0104
--- /dev/null
+++ 
b/spark/src/test/java/org/apache/iceberg/spark/source/TestInternalRowWrapper.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.spark.source;
+
+import java.util.Iterator;
+import org.apache.iceberg.RecordWrapperTest;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.StructLike;
+import org.apache.iceberg.data.InternalRecordWrapper;
+import org.apache.iceberg.data.RandomGenericData;
+import org.apache.iceberg.data.Record;
+import org.apache.iceberg.spark.SparkSchemaUtil;
+import org.apache.iceberg.spark.data.RandomData;
+import org.apache.iceberg.util.StructLikeWrapper;
+import org.apache.spark.sql.catalyst.InternalRow;
+import org.junit.Assert;
+import org.junit.Ignore;
+
+public class TestInternalRowWrapper extends RecordWrapperTest {
+
+  @Ignore
+  @Override
+  public void testTimestampWithoutZone() {
+    // Spark does not support timestamp without zone.
+  }
+
+  @Ignore
+  @Override
+  public void testTime() {
+    // Spark does not support time fields.
+  }
+
+  @Override
+  protected void generateAndValidate(Schema schema, AssertMethod assertMethod) 
{
+    int numRecords = 100;
+    Iterable<Record> recordList = RandomGenericData.generate(schema, 
numRecords, 101L);
+    Iterable<InternalRow> rowList = RandomData.generateSpark(schema, 
numRecords, 101L);
+
+    InternalRecordWrapper recordWrapper = new 
InternalRecordWrapper(schema.asStruct());
+    InternalRowWrapper rowWrapper = new 
InternalRowWrapper(SparkSchemaUtil.convert(schema));
+
+    Iterator<Record> actual = recordList.iterator();
+    Iterator<InternalRow> expected = rowList.iterator();
+
+    StructLikeWrapper actualWrapper = 
StructLikeWrapper.forType(schema.asStruct());
+    StructLikeWrapper expectedWrapper = 
StructLikeWrapper.forType(schema.asStruct());
+    for (int i = 0; i < numRecords; i++) {
+      Assert.assertTrue("Should have more records", actual.hasNext());
+      Assert.assertTrue("Should have more InternalRow", expected.hasNext());
+
+      StructLike recordStructLike = recordWrapper.wrap(actual.next());
+      StructLike rowStructLike = rowWrapper.wrap(expected.next());
+
+      assertMethod.assertEquals("Should have expected StructLike values",
+          actualWrapper.set(recordStructLike), 
expectedWrapper.set(rowStructLike));
+    }
+
+    Assert.assertFalse("Shouldn't have more record", actual.hasNext());
+    Assert.assertFalse("Shouldn't have more InternalRow", expected.hasNext());
+  }
+}

Reply via email to