This is an automated email from the ASF dual-hosted git repository.

sbadhya pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 2019718e099 HIVE-27671: Implement array_append UDF to append an 
element to array (#5121) (Taraka Rama Rao Lethavadla reviewed by Sourabh Badhya)
2019718e099 is described below

commit 2019718e099bc24fc5ce2927b3bb0e32fb5dc1a6
Author: tarak271 <[email protected]>
AuthorDate: Mon Mar 25 11:48:13 2024 +0530

    HIVE-27671: Implement array_append UDF to append an element to array 
(#5121) (Taraka Rama Rao Lethavadla reviewed by Sourabh Badhya)
---
 .../hadoop/hive/ql/exec/FunctionRegistry.java      |   1 +
 .../hive/ql/udf/generic/GenericUDFArrayAppend.java |  66 +++++++++
 .../ql/udf/generic/TestGenericUDFArrayAppend.java  | 155 +++++++++++++++++++++
 .../queries/clientnegative/udf_array_append_1.q    |   1 +
 .../queries/clientnegative/udf_array_append_2.q    |   1 +
 .../test/queries/clientpositive/udf_array_append.q |  44 ++++++
 .../clientnegative/udf_array_append_1.q.out        |   1 +
 .../clientnegative/udf_array_append_2.q.out        |   1 +
 .../clientpositive/llap/show_functions.q.out       |   2 +
 .../clientpositive/llap/udf_array_append.q.out     | 141 +++++++++++++++++++
 10 files changed, 413 insertions(+)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index e97b56b4dca..28f35c4a15f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -616,6 +616,7 @@ public final class FunctionRegistry {
     system.registerGenericUDF("array_union", GenericUDFArrayUnion.class);
     system.registerGenericUDF("array_remove", GenericUDFArrayRemove.class);
     system.registerGenericUDF("array_position", GenericUDFArrayPosition.class);
+    system.registerGenericUDF("array_append", GenericUDFArrayAppend.class);
     system.registerGenericUDF("deserialize", GenericUDFDeserialize.class);
     system.registerGenericUDF("sentences", GenericUDFSentences.class);
     system.registerGenericUDF("map_keys", GenericUDFMapKeys.class);
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayAppend.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayAppend.java
new file mode 100644
index 00000000000..0a41ee56efa
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayAppend.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * GenericUDFArrayAppend.
+ */
+@Description(name = "array_append", value = "_FUNC_(array, element) - Returns 
an array appended by element.",
+    extended = "Example:\n" + "  > SELECT _FUNC_(array(1,3,4), 2) FROM src;\n" 
+ "  [1,3,4,2]")
+public class GenericUDFArrayAppend extends AbstractGenericUDFArrayBase {
+  private static final String FUNC_NAME = "ARRAY_APPEND";
+  private static final int ELEMENT_IDX = 1;
+
+  public GenericUDFArrayAppend() {
+    super(FUNC_NAME, 2, 2, ObjectInspector.Category.LIST);
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
+    ObjectInspector defaultOI = super.initialize(arguments);
+    checkValueAndListElementTypes(arrayElementOI, FUNC_NAME, 
arguments[ELEMENT_IDX], ELEMENT_IDX);
+    return defaultOI;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object array = arguments[ARRAY_IDX].get();
+    Object value = arguments[ELEMENT_IDX].get();
+    int arrayLength = arrayOI.getListLength(array);
+    if (arrayLength == 0) {
+      return Collections.emptyList();
+    } else if (arrayLength < 0) {
+      return null;
+    }
+
+    List resultArray = new ArrayList<>(((ListObjectInspector) 
argumentOIs[ARRAY_IDX]).getList(array));
+    resultArray.add(value);
+    return resultArray.stream().map(o -> 
converter.convert(o)).collect(Collectors.toList());
+  }
+}
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArrayAppend.java
 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArrayAppend.java
new file mode 100644
index 00000000000..2dc8fb328a5
--- /dev/null
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArrayAppend.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.Arrays.asList;
+
+public class TestGenericUDFArrayAppend {
+  private final GenericUDFArrayAppend udf = new GenericUDFArrayAppend();
+
+  @Test
+  public void testPrimitive() throws HiveException {
+    ObjectInspector[] inputOIs = { 
ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector),
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
+    udf.initialize(inputOIs);
+
+    Object i1 = new IntWritable(3);
+    Object i2 = new IntWritable(1);
+    Object i3 = new IntWritable(2);
+    Object i4 = new IntWritable(1);
+    Object i5 = new IntWritable(5);
+
+    runAndVerify(asList(i1, i2, i3, i4), i5, asList(i1, i2, i3, i4, i5));
+    i1 = new FloatWritable(3.3f);
+    i2 = new FloatWritable(1.1f);
+    i3 = new FloatWritable(3.3f);
+    i4 = new FloatWritable(2.20f);
+    i5 = new FloatWritable(5.20f);
+    runAndVerify(asList(i1, i2, i3, i4), i5, asList(i1, i2, i3, i4, i5));
+    runAndVerify(asList(i1, i2, i3, i4), null, asList(i1, i2, i3, i4, null)); 
//Test null element
+  }
+
+  @Test
+  public void testList() throws HiveException {
+    ObjectInspector[] inputOIs = { 
ObjectInspectorFactory.getStandardListObjectInspector(
+        ObjectInspectorFactory.getStandardListObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector)),
+        ObjectInspectorFactory.getStandardListObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector) };
+    udf.initialize(inputOIs);
+
+    Object i1 = asList(new Text("aa1"), new Text("dd"), new Text("cc"), new 
Text("bb"));
+    Object i2 = asList(new Text("aa2"), new Text("cc"), new Text("ba"), new 
Text("dd"));
+    Object i3 = asList(new Text("aa3"), new Text("cc"), new Text("dd"), new 
Text("ee"), new Text("bb"));
+    Object i4 = asList(new Text("aa4"), new Text("cc"), new Text("ddd"), new 
Text("bb"));
+    runAndVerify(asList(i1, i2, i2, i3, i4), i4, asList(i1, i2, i2, i3, i4, 
i4));
+  }
+
+  @Test
+  public void testStruct() throws HiveException {
+    ObjectInspector[] inputOIs = { 
ObjectInspectorFactory.getStandardListObjectInspector(
+        ObjectInspectorFactory.getStandardStructObjectInspector(asList("f1", 
"f2", "f3", "f4"),
+            
asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+                PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+                PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+                ObjectInspectorFactory.getStandardListObjectInspector(
+                    
PrimitiveObjectInspectorFactory.writableIntObjectInspector)))),
+        ObjectInspectorFactory.getStandardStructObjectInspector(asList("f1", 
"f2", "f3", "f4"),
+            
asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+                PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+                PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+                ObjectInspectorFactory.getStandardListObjectInspector(
+                    
PrimitiveObjectInspectorFactory.writableIntObjectInspector))) };
+    udf.initialize(inputOIs);
+
+    Object i1 = asList(new Text("a"), new DoubleWritable(3.1415), new 
DateWritableV2(Date.of(2015, 5, 26)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new 
IntWritable(4)));
+
+    Object i2 = asList(new Text("b"), new DoubleWritable(3.14), new 
DateWritableV2(Date.of(2015, 5, 26)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new 
IntWritable(4)));
+
+    Object i3 = asList(new Text("a"), new DoubleWritable(3.1415), new 
DateWritableV2(Date.of(2015, 5, 25)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new 
IntWritable(5)));
+
+    Object i4 = asList(new Text("a"), new DoubleWritable(3.1415), new 
DateWritableV2(Date.of(2015, 5, 25)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new 
IntWritable(4)));
+
+    runAndVerify(asList(i1, i3, i2, i3, i4), i2, asList(i1, i3, i2, i3, i4, 
i2));
+  }
+
+  @Test
+  public void testMap() throws HiveException {
+    ObjectInspector[] inputOIs = { 
ObjectInspectorFactory.getStandardListObjectInspector(
+        ObjectInspectorFactory.getStandardMapObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+            PrimitiveObjectInspectorFactory.writableIntObjectInspector)),
+        ObjectInspectorFactory.getStandardMapObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+            PrimitiveObjectInspectorFactory.writableIntObjectInspector) };
+    udf.initialize(inputOIs);
+
+    Map<Text, IntWritable> m1 = new HashMap<Text, IntWritable>();
+    m1.put(new Text("a"), new IntWritable(4));
+    m1.put(new Text("b"), new IntWritable(3));
+    m1.put(new Text("c"), new IntWritable(1));
+    m1.put(new Text("d"), new IntWritable(2));
+
+    Map<Text, IntWritable> m2 = new HashMap<Text, IntWritable>();
+    m2.put(new Text("d"), new IntWritable(4));
+    m2.put(new Text("b"), new IntWritable(3));
+    m2.put(new Text("a"), new IntWritable(1));
+    m2.put(new Text("c"), new IntWritable(2));
+
+    Map<Text, IntWritable> m3 = new HashMap<Text, IntWritable>();
+    m3.put(new Text("d"), new IntWritable(4));
+    m3.put(new Text("b"), new IntWritable(3));
+    m3.put(new Text("a"), new IntWritable(1));
+
+    runAndVerify(asList(m1, m3, m2, m3, m1), m2, asList(m1, m3, m2, m3, m1, 
m2));
+  }
+
+  private void runAndVerify(List<Object> actual, Object element, List<Object> 
expected) throws HiveException {
+    GenericUDF.DeferredJavaObject[] args =
+        { new GenericUDF.DeferredJavaObject(actual), new 
GenericUDF.DeferredJavaObject(element) };
+    List<Object> result = (List<Object>) udf.evaluate(args);
+    if (expected == null) {
+      Assert.assertNull(result);
+    } else {
+      Assert.assertArrayEquals("Check content", expected.toArray(), 
result.toArray());
+    }
+  }
+}
diff --git a/ql/src/test/queries/clientnegative/udf_array_append_1.q 
b/ql/src/test/queries/clientnegative/udf_array_append_1.q
new file mode 100644
index 00000000000..5cee7106ddc
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/udf_array_append_1.q
@@ -0,0 +1 @@
+SELECT array_append(3, 3);
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/udf_array_append_2.q 
b/ql/src/test/queries/clientnegative/udf_array_append_2.q
new file mode 100644
index 00000000000..21d0375c3d2
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/udf_array_append_2.q
@@ -0,0 +1 @@
+SELECT array_append(array(3), array(3));
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/udf_array_append.q 
b/ql/src/test/queries/clientpositive/udf_array_append.q
new file mode 100644
index 00000000000..a4c2c67fc94
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udf_array_append.q
@@ -0,0 +1,44 @@
--! qt:dataset:src

-- SORT_QUERY_RESULTS

set hive.fetch.task.conversion=more;

DESCRIBE FUNCTION array_append;
DESCRIBE FUNCTION EXTENDED array_append;

-- evaluates function for array of primitives
SELECT array_append(array(1, 2, 3, null,3,4), 3);

SELECT array_append(array(1.12, 2.23, 3.34, null,1.11,1.12,2.9),1.12);

SELECT array(1,2,3),array_append(array(1, 2, 3),3);

SELECT array(1,2,3),array_append(array(1, 2, 3),5);

SELECT array_append(array(1, 2, 3), CAST(null AS int));

SELECT array_append(array(1.1234567890, 2.234567890, 3.34567890, null, 3.3456789, 2.234567,1.1234567890),1.1234567890);

SELECT array_append(array(11234567890, 2234567890, 334567890, null, 11234567890, 2234567890, 334567890, null),11234567890);

SELECT array_append(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")),array("a","b","c","d"));

SELECT array_append(array("aa","bb","cc"),"cc");

-- handle null array cases

dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/test_null_array;

dfs -copyFromLocal ../../data/files/test_null_array.csv ${system:test.tmp.dir}/test_null_array/;

create external table test_null_array (id string, value Array<String>) ROW FORMAT DELIMITED
 FIELDS TERMINATED BY ':' collection items terminated by ',' location '${system:test.tmp.dir}/test_null_array';

select id,value from test_null_array;

select id, array_append(value,id) from test_null_array;

select value, array_append(value,id) from test_null_array;

dfs -rm -r ${system:test.tmp.dir}/test_null_array;
diff --git a/ql/src/test/results/clientnegative/udf_array_append_1.q.out 
b/ql/src/test/results/clientnegative/udf_array_append_1.q.out
new file mode 100644
index 00000000000..02edd70792e
--- /dev/null
+++ b/ql/src/test/results/clientnegative/udf_array_append_1.q.out
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10016]: Line 1:20 Argument type mismatch '3': 
"array" expected at function ARRAY_APPEND, but "int" is found
diff --git a/ql/src/test/results/clientnegative/udf_array_append_2.q.out 
b/ql/src/test/results/clientnegative/udf_array_append_2.q.out
new file mode 100644
index 00000000000..eeeaa1a1a8a
--- /dev/null
+++ b/ql/src/test/results/clientnegative/udf_array_append_2.q.out
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10016]: Line 1:30 Argument type mismatch '3': 
int type element is expected at function ARRAY_APPEND(array<int>,int), but 
array<int> is found
diff --git a/ql/src/test/results/clientpositive/llap/show_functions.q.out 
b/ql/src/test/results/clientpositive/llap/show_functions.q.out
index 24105d06010..b764c89e94d 100644
--- a/ql/src/test/results/clientpositive/llap/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/llap/show_functions.q.out
@@ -46,6 +46,7 @@ aes_encrypt
 and
 approx_distinct
 array
+array_append
 array_contains
 array_distinct
 array_except
@@ -675,6 +676,7 @@ aes_encrypt
 and
 approx_distinct
 array
+array_append
 array_contains
 array_distinct
 array_except
diff --git a/ql/src/test/results/clientpositive/llap/udf_array_append.q.out 
b/ql/src/test/results/clientpositive/llap/udf_array_append.q.out
new file mode 100644
index 00000000000..35694a7cacd
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/udf_array_append.q.out
@@ -0,0 +1,141 @@
+PREHOOK: query: DESCRIBE FUNCTION array_append
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION array_append
+POSTHOOK: type: DESCFUNCTION
+array_append(array, element) - Returns an array appended by element.
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED array_append
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED array_append
+POSTHOOK: type: DESCFUNCTION
+array_append(array, element) - Returns an array appended by element.
+Example:
+  > SELECT array_append(array(1,3,4), 2) FROM src;
+  [1,3,4,2]
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayAppend
+Function type:BUILTIN
+PREHOOK: query: SELECT array_append(array(1, 2, 3, null,3,4), 3)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_append(array(1, 2, 3, null,3,4), 3)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[1,2,3,null,3,4,3]
+PREHOOK: query: SELECT array_append(array(1.12, 2.23, 3.34, 
null,1.11,1.12,2.9),1.12)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_append(array(1.12, 2.23, 3.34, 
null,1.11,1.12,2.9),1.12)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[1.12,2.23,3.34,null,1.11,1.12,2.9,1.12]
+PREHOOK: query: SELECT array(1,2,3),array_append(array(1, 2, 3),3)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array(1,2,3),array_append(array(1, 2, 3),3)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[1,2,3]        [1,2,3,3]
+PREHOOK: query: SELECT array(1,2,3),array_append(array(1, 2, 3),5)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array(1,2,3),array_append(array(1, 2, 3),5)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[1,2,3]        [1,2,3,5]
+PREHOOK: query: SELECT array_append(array(1, 2, 3), CAST(null AS int))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_append(array(1, 2, 3), CAST(null AS int))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[1,2,3,null]
+PREHOOK: query: SELECT array_append(array(1.1234567890, 2.234567890, 
3.34567890, null, 3.3456789, 2.234567,1.1234567890),1.1234567890)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_append(array(1.1234567890, 2.234567890, 
3.34567890, null, 3.3456789, 2.234567,1.1234567890),1.1234567890)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[1.123456789,2.23456789,3.3456789,null,3.3456789,2.234567,1.123456789,1.123456789]
+PREHOOK: query: SELECT array_append(array(11234567890, 2234567890, 334567890, 
null, 11234567890, 2234567890, 334567890, null),11234567890)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_append(array(11234567890, 2234567890, 334567890, 
null, 11234567890, 2234567890, 334567890, null),11234567890)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[11234567890,2234567890,334567890,null,11234567890,2234567890,334567890,null,11234567890]
+PREHOOK: query: SELECT 
array_append(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")),array("a","b","c","d"))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT 
array_append(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")),array("a","b","c","d"))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+[["a","b","c","d"],["a","b","c","d"],["a","b","c","d","e"],null,["e","a","b","c","d"],["a","b","c","d"]]
+PREHOOK: query: SELECT array_append(array("aa","bb","cc"),"cc")
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_append(array("aa","bb","cc"),"cc")
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+["aa","bb","cc","cc"]
+PREHOOK: query: create external table test_null_array (id string, value 
Array<String>) ROW FORMAT DELIMITED
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_null_array
+POSTHOOK: query: create external table test_null_array (id string, value 
Array<String>) ROW FORMAT DELIMITED
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_null_array
+PREHOOK: query: select id,value from test_null_array
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+POSTHOOK: query: select id,value from test_null_array
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+1      []
+2      ["NULL"]
+3      ["null","null"]
+PREHOOK: query: select id, array_append(value,id) from test_null_array
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+POSTHOOK: query: select id, array_append(value,id) from test_null_array
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+1      []
+2      ["NULL","2"]
+3      ["null","null","3"]
+PREHOOK: query: select value, array_append(value,id) from test_null_array
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+POSTHOOK: query: select value, array_append(value,id) from test_null_array
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+["NULL"]       ["NULL","2"]
+["null","null"]        ["null","null","3"]
+[]     []

Reply via email to