This is an automated email from the ASF dual-hosted git repository.
ayushsaxena pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 2f70ffff9a2 HIVE-26754: Implement array_distinct UDF to return an
array after removing duplicates in it. (#3806). (Taraka Rama Rao Lethavadla,
reviewed by Ayush Saxena, Sourabh Badhya)
2f70ffff9a2 is described below
commit 2f70ffff9a2cf84a8060b62bed68a24605c45824
Author: tarak271 <[email protected]>
AuthorDate: Fri Dec 23 00:37:56 2022 +0530
HIVE-26754: Implement array_distinct UDF to return an array after removing
duplicates in it. (#3806). (Taraka Rama Rao Lethavadla, reviewed by Ayush
Saxena, Sourabh Badhya)
---
data/files/test_null_array.csv | 3 +
.../hadoop/hive/ql/exec/FunctionRegistry.java | 1 +
.../ql/udf/generic/GenericUDFArrayDistinct.java | 58 ++++++++++
.../hive/ql/udf/generic/TestGenericUDFArray.java | 53 +++++++++
.../udf/generic/TestGenericUDFArrayDistinct.java | 127 +++++++++++++++++++++
.../queries/clientpositive/udf_array_distinct.q | 38 ++++++
.../clientpositive/llap/show_functions.q.out | 2 +
.../clientpositive/llap/udf_array_distinct.q.out | 112 ++++++++++++++++++
8 files changed, 394 insertions(+)
diff --git a/data/files/test_null_array.csv b/data/files/test_null_array.csv
new file mode 100644
index 00000000000..c2fc597d2d4
--- /dev/null
+++ b/data/files/test_null_array.csv
@@ -0,0 +1,3 @@
+1:
+2:NULL
+3:null,null
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 46c5eb3efa7..d2deb5d82ff 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -602,6 +602,7 @@ public final class FunctionRegistry {
system.registerGenericUDF("array_contains", GenericUDFArrayContains.class);
system.registerGenericUDF("array_min", GenericUDFArrayMin.class);
system.registerGenericUDF("array_max", GenericUDFArrayMax.class);
+ system.registerGenericUDF("array_distinct", GenericUDFArrayDistinct.class);
system.registerGenericUDF("deserialize", GenericUDFDeserialize.class);
system.registerGenericUDF("sentences", GenericUDFSentences.class);
system.registerGenericUDF("map_keys", GenericUDFMapKeys.class);
diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayDistinct.java
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayDistinct.java
new file mode 100644
index 00000000000..7ae5b193a16
--- /dev/null
+++
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayDistinct.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Generic UDF for distinct array
+ * <code>ARRAY_DISTINCT(array(obj1, obj2, obj3...))</code>.
+ *
+ * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF
+ */
+@Description(name = "array_distinct", value = "_FUNC_(array(obj1, obj2,...)) -
"
+ + "The function returns an array of the same type as the input array with
distinct values.", extended = "Example:\n"
+ + " > SELECT _FUNC_(array('b', 'd', 'd', 'a')) FROM src LIMIT 1;\n"
+ + " ['b', 'd', 'a']") public class GenericUDFArrayDistinct extends
AbstractGenericUDFArrayBase {
+
+ public GenericUDFArrayDistinct() {
+ super("ARRAY_DISTINCT", 1, 1, ObjectInspector.Category.LIST);
+ }
+
+ @Override public Object evaluate(DeferredObject[] arguments) throws
HiveException {
+
+ Object array = arguments[ARRAY_IDX].get();
+
+ // If the array is empty, then there are no duplicates, return back the
empty array
+ if (arrayOI.getListLength(array) == 0) {
+ return Collections.emptyList();
+ } else if (arrayOI.getListLength(array) < 0) {
+ return null;
+ }
+
+ List<?> retArray = ((ListObjectInspector)
argumentOIs[ARRAY_IDX]).getList(array);
+ return retArray.stream().distinct().map(o ->
converter.convert(o)).collect(Collectors.toList());
+ }
+}
\ No newline at end of file
diff --git
a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArray.java
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArray.java
new file mode 100644
index 00000000000..c05c402a50e
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArray.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.List;
+
+public class TestGenericUDFArray {
+ protected AbstractGenericUDFArrayBase udf = null;
+
+ protected void runAndVerify(List<Object> actual, List<Object> expected)
throws HiveException {
+ GenericUDF.DeferredJavaObject[] args = { new
GenericUDF.DeferredJavaObject(actual) };
+ if (udf != null) {
+ List<?> result = (List<?>) udf.evaluate(args);
+ if ((null == actual)) {
+ Assert.assertEquals(actual, result);
+ } else {
+ Assert.assertArrayEquals("Check content", expected.toArray(),
result.toArray());
+ }
+ }
+ }
+
+ @Test public void testNullAndEmptyArray() throws HiveException {
+ ObjectInspector[] inputOIs = {
ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.writableVoidObjectInspector) };
+ if (udf != null) {
+ udf.initialize(inputOIs);
+ }
+ runAndVerify(null, null);
+ }
+}
diff --git
a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArrayDistinct.java
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArrayDistinct.java
new file mode 100644
index 00000000000..a520d36b81c
--- /dev/null
+++
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArrayDistinct.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Arrays.asList;
+
+public class TestGenericUDFArrayDistinct extends TestGenericUDFArray {
+
+ public TestGenericUDFArrayDistinct() {
+ super.udf = new GenericUDFArrayDistinct();
+ }
+
+ @Test public void testPrimitive() throws HiveException {
+ ObjectInspector[] inputOIs = {
ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector) };
+ udf.initialize(inputOIs);
+
+ Object i1 = new IntWritable(3);
+ Object i2 = new IntWritable(1);
+ Object i3 = new IntWritable(2);
+ Object i4 = new IntWritable(1);
+ runAndVerify(asList(i1, i2, i3, i4), asList(i1, i2, i3));
+
+ i1 = new FloatWritable(3.3f);
+ i2 = new FloatWritable(1.1f);
+ i3 = new FloatWritable(3.3f);
+ i4 = new FloatWritable(2.20f);
+ runAndVerify(asList(i1, i2, i3, i4), asList(i1, i2, i4));
+ }
+
+ @Test public void testList() throws HiveException {
+ ObjectInspector[] inputOIs = {
ObjectInspectorFactory.getStandardListObjectInspector(
+ ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.writableStringObjectInspector)) };
+ udf.initialize(inputOIs);
+
+ Object i1 = asList(new Text("aa1"), new Text("dd"), new Text("cc"), new
Text("bb"));
+ Object i2 = asList(new Text("aa2"), new Text("cc"), new Text("ba"), new
Text("dd"));
+ Object i3 = asList(new Text("aa3"), new Text("cc"), new Text("dd"), new
Text("ee"), new Text("bb"));
+ Object i4 = asList(new Text("aa4"), new Text("cc"), new Text("ddd"), new
Text("bb"));
+ runAndVerify(asList(i1, i2, i2, i3, i4, i4), asList(i1, i2, i3, i4));
+ }
+
+ @Test public void testStruct() throws HiveException {
+ ObjectInspector[] inputOIs = {
ObjectInspectorFactory.getStandardListObjectInspector(
+ ObjectInspectorFactory.getStandardStructObjectInspector(asList("f1",
"f2", "f3", "f4"),
+
asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+ PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+ PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+ ObjectInspectorFactory.getStandardListObjectInspector(
+
PrimitiveObjectInspectorFactory.writableIntObjectInspector)))) };
+ udf.initialize(inputOIs);
+
+ Object i1 = asList(new Text("a"), new DoubleWritable(3.1415), new
DateWritableV2(Date.of(2015, 5, 26)),
+ asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new
IntWritable(4)));
+
+ Object i2 = asList(new Text("b"), new DoubleWritable(3.14), new
DateWritableV2(Date.of(2015, 5, 26)),
+ asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new
IntWritable(4)));
+
+ Object i3 = asList(new Text("a"), new DoubleWritable(3.1415), new
DateWritableV2(Date.of(2015, 5, 25)),
+ asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new
IntWritable(5)));
+
+ Object i4 = asList(new Text("a"), new DoubleWritable(3.1415), new
DateWritableV2(Date.of(2015, 5, 25)),
+ asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new
IntWritable(4)));
+
+ runAndVerify(asList(i1, i3, i2, i3, i4, i2), asList(i1, i3, i2, i4));
+ }
+
+ @Test public void testMap() throws HiveException {
+ ObjectInspector[] inputOIs = {
ObjectInspectorFactory.getStandardListObjectInspector(
+ ObjectInspectorFactory.getStandardMapObjectInspector(
+ PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector)) };
+ udf.initialize(inputOIs);
+
+ Map<Text, IntWritable> m1 = new HashMap<>();
+ m1.put(new Text("a"), new IntWritable(4));
+ m1.put(new Text("b"), new IntWritable(3));
+ m1.put(new Text("c"), new IntWritable(1));
+ m1.put(new Text("d"), new IntWritable(2));
+
+ Map<Text, IntWritable> m2 = new HashMap<>();
+ m2.put(new Text("d"), new IntWritable(4));
+ m2.put(new Text("b"), new IntWritable(3));
+ m2.put(new Text("a"), new IntWritable(1));
+ m2.put(new Text("c"), new IntWritable(2));
+
+ Map<Text, IntWritable> m3 = new HashMap<>();
+ m3.put(new Text("d"), new IntWritable(4));
+ m3.put(new Text("b"), new IntWritable(3));
+ m3.put(new Text("a"), new IntWritable(1));
+
+ runAndVerify(asList(m1, m3, m2, m3, m1), asList(m1, m3, m2));
+ }
+
+}
diff --git a/ql/src/test/queries/clientpositive/udf_array_distinct.q
b/ql/src/test/queries/clientpositive/udf_array_distinct.q
new file mode 100644
index 00000000000..bf71c5ea166
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udf_array_distinct.q
@@ -0,0 +1,38 @@
+--! qt:dataset:src
+
+-- SORT_QUERY_RESULTS
+
+set hive.fetch.task.conversion=more;
+
+DESCRIBE FUNCTION array_distinct;
+DESCRIBE FUNCTION EXTENDED array_distinct;
+
+-- evaluates function for array of primitives
+SELECT array_distinct(array(1, 2, 3, null,3,4)) FROM src tablesample (1 rows);
+
+SELECT array_distinct(array()) FROM src tablesample (1 rows);
+
+SELECT array_distinct(array(null)) FROM src tablesample (1 rows);
+
+SELECT array_distinct(array(1.12, 2.23, 3.34, null,1.11,1.12,2.9)) FROM src
tablesample (1 rows);
+
+SELECT array_distinct(array(1.1234567890, 2.234567890, 3.34567890, null,
3.3456789, 2.234567,1.1234567890)) FROM src tablesample (1 rows);
+
+SELECT array_distinct(array(11234567890, 2234567890, 334567890, null,
11234567890, 2234567890, 334567890, null)) FROM src tablesample (1 rows);
+
+SELECT
array_distinct(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")))
FROM src tablesample (1 rows);
+
+-- handle null array cases
+
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/test_null_array;
+
+dfs -copyFromLocal ../../data/files/test_null_array.csv
${system:test.tmp.dir}/test_null_array/;
+
+create external table test_null_array (id int, value Array<String>) ROW FORMAT
DELIMITED
+ FIELDS TERMINATED BY ':' collection items terminated by ',' location
'${system:test.tmp.dir}/test_null_array';
+
+select value from test_null_array;
+
+select array_distinct(value) from test_null_array;
+
+dfs -rm -r ${system:test.tmp.dir}/test_null_array;
\ No newline at end of file
diff --git a/ql/src/test/results/clientpositive/llap/show_functions.q.out
b/ql/src/test/results/clientpositive/llap/show_functions.q.out
index de0ced3fa6f..095b3bd9b04 100644
--- a/ql/src/test/results/clientpositive/llap/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/llap/show_functions.q.out
@@ -47,6 +47,7 @@ and
approx_distinct
array
array_contains
+array_distinct
array_max
array_min
ascii
@@ -664,6 +665,7 @@ and
approx_distinct
array
array_contains
+array_distinct
array_max
array_min
ascii
diff --git a/ql/src/test/results/clientpositive/llap/udf_array_distinct.q.out
b/ql/src/test/results/clientpositive/llap/udf_array_distinct.q.out
new file mode 100644
index 00000000000..6dec63e6d36
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/udf_array_distinct.q.out
@@ -0,0 +1,112 @@
+PREHOOK: query: DESCRIBE FUNCTION array_distinct
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION array_distinct
+POSTHOOK: type: DESCFUNCTION
+array_distinct(array(obj1, obj2,...)) - The function returns an array of the
same type as the input array with distinct values.
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED array_distinct
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED array_distinct
+POSTHOOK: type: DESCFUNCTION
+array_distinct(array(obj1, obj2,...)) - The function returns an array of the
same type as the input array with distinct values.
+Example:
+ > SELECT array_distinct(array('b', 'd', 'd', 'a')) FROM src LIMIT 1;
+ ['b', 'd', 'a']
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayDistinct
+Function type:BUILTIN
+PREHOOK: query: SELECT array_distinct(array(1, 2, 3, null,3,4)) FROM src
tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_distinct(array(1, 2, 3, null,3,4)) FROM src
tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[1,2,3,null,4]
+PREHOOK: query: SELECT array_distinct(array()) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_distinct(array()) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[]
+PREHOOK: query: SELECT array_distinct(array(null)) FROM src tablesample (1
rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_distinct(array(null)) FROM src tablesample (1
rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[null]
+PREHOOK: query: SELECT array_distinct(array(1.12, 2.23, 3.34,
null,1.11,1.12,2.9)) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_distinct(array(1.12, 2.23, 3.34,
null,1.11,1.12,2.9)) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[1.12,2.23,3.34,null,1.11,2.9]
+PREHOOK: query: SELECT array_distinct(array(1.1234567890, 2.234567890,
3.34567890, null, 3.3456789, 2.234567,1.1234567890)) FROM src tablesample (1
rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_distinct(array(1.1234567890, 2.234567890,
3.34567890, null, 3.3456789, 2.234567,1.1234567890)) FROM src tablesample (1
rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[1.123456789,2.23456789,3.3456789,null,2.234567]
+PREHOOK: query: SELECT array_distinct(array(11234567890, 2234567890,
334567890, null, 11234567890, 2234567890, 334567890, null)) FROM src
tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_distinct(array(11234567890, 2234567890,
334567890, null, 11234567890, 2234567890, 334567890, null)) FROM src
tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[11234567890,2234567890,334567890,null]
+PREHOOK: query: SELECT
array_distinct(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")))
FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
array_distinct(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")))
FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[["a","b","c","d"],["a","b","c","d","e"],null,["e","a","b","c","d"]]
+PREHOOK: query: create external table test_null_array (id int, value
Array<String>) ROW FORMAT DELIMITED
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_null_array
+POSTHOOK: query: create external table test_null_array (id int, value
Array<String>) ROW FORMAT DELIMITED
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_null_array
+PREHOOK: query: select value from test_null_array
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+POSTHOOK: query: select value from test_null_array
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+["NULL"]
+["null","null"]
+[]
+PREHOOK: query: select array_distinct(value) from test_null_array
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+POSTHOOK: query: select array_distinct(value) from test_null_array
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+["NULL"]
+["null"]
+[]