This is an automated email from the ASF dual-hosted git repository.

xiong pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/calcite.git


The following commit(s) were added to refs/heads/main by this push:
     new a94927e9b8 [CALCITE-6844] Add all Spark array functions that are also 
supported in Hive (enable in Hive Library)
a94927e9b8 is described below

commit a94927e9b80f9f5bf639e31c2636536cb6aebc1a
Author: xuyu <[email protected]>
AuthorDate: Mon Feb 17 23:23:19 2025 +0800

    [CALCITE-6844] Add all Spark array functions that are also supported in 
Hive (enable in Hive Library)
---
 .../calcite/sql/fun/SqlLibraryOperators.java       |  20 +-
 site/_docs/reference.md                            |  18 +-
 .../org/apache/calcite/test/SqlOperatorTest.java   | 556 +++++++++++----------
 3 files changed, 309 insertions(+), 285 deletions(-)

diff --git 
a/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java 
b/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java
index fce4d0bdc4..48f323dc1c 100644
--- a/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java
+++ b/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java
@@ -1381,7 +1381,7 @@ private static RelDataType 
arrayReturnType(SqlOperatorBinding opBinding) {
 
   /** The "ARRAY(exp, ...)" function (Spark);
    * compare with the standard array value constructor, "ARRAY [exp, ...]". */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY =
       SqlBasicFunction.create("ARRAY",
           SqlLibraryOperators::arrayReturnType,
@@ -1515,14 +1515,14 @@ private static RelDataType 
arrayCompactReturnType(SqlOperatorBinding opBinding)
           OperandTypes.ARRAY_ELEMENT_NONNULL);
 
   /** The "ARRAY_DISTINCT(array)" function. */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_DISTINCT =
       SqlBasicFunction.create(SqlKind.ARRAY_DISTINCT,
           ReturnTypes.ARG0_NULLABLE,
           OperandTypes.ARRAY);
 
   /** The "ARRAY_EXCEPT(array1, array2)" function. */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_EXCEPT =
       SqlBasicFunction.create(SqlKind.ARRAY_EXCEPT,
           ReturnTypes.LEAST_RESTRICTIVE,
@@ -1572,7 +1572,7 @@ private static RelDataType 
arrayInsertReturnType(SqlOperatorBinding opBinding) {
           OperandTypes.ARRAY_INSERT);
 
   /** The "ARRAY_INTERSECT(array1, array2)" function. */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_INTERSECT =
       SqlBasicFunction.create(SqlKind.ARRAY_INTERSECT,
           ReturnTypes.LEAST_RESTRICTIVE,
@@ -1582,7 +1582,7 @@ private static RelDataType 
arrayInsertReturnType(SqlOperatorBinding opBinding) {
               OperandTypes.family(SqlTypeFamily.ARRAY, SqlTypeFamily.ARRAY)));
 
   /** The "ARRAY_JOIN(array, delimiter [, nullText ])" function. */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_JOIN =
       SqlBasicFunction.create(SqlKind.ARRAY_JOIN,
           ReturnTypes.VARCHAR_NULLABLE,
@@ -1596,14 +1596,14 @@ private static RelDataType 
arrayInsertReturnType(SqlOperatorBinding opBinding) {
           OperandTypes.ARRAY);
 
   /** The "ARRAY_MAX(array)" function. */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_MAX =
       SqlBasicFunction.create(SqlKind.ARRAY_MAX,
           ReturnTypes.TO_COLLECTION_ELEMENT_FORCE_NULLABLE,
           OperandTypes.ARRAY_NONNULL);
 
-  /** The "ARRAY_MAX(array)" function. */
-  @LibraryOperator(libraries = {SPARK})
+  /** The "ARRAY_MIN(array)" function. */
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_MIN =
       SqlBasicFunction.create(SqlKind.ARRAY_MIN,
           ReturnTypes.TO_COLLECTION_ELEMENT_FORCE_NULLABLE,
@@ -1624,7 +1624,7 @@ private static RelDataType 
arrayInsertReturnType(SqlOperatorBinding opBinding) {
           OperandTypes.ARRAY_ELEMENT_NONNULL);
 
   /** The "ARRAY_REMOVE(array, element)" function. */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_REMOVE =
       SqlBasicFunction.create(SqlKind.ARRAY_REMOVE,
           ReturnTypes.ARG0_NULLABLE,
@@ -1663,7 +1663,7 @@ private static RelDataType 
arrayInsertReturnType(SqlOperatorBinding opBinding) {
               OperandTypes.ARRAY, OperandTypes.INTEGER, OperandTypes.INTEGER));
 
   /** The "ARRAY_UNION(array1, array2)" function. */
-  @LibraryOperator(libraries = {SPARK})
+  @LibraryOperator(libraries = {SPARK, HIVE})
   public static final SqlFunction ARRAY_UNION =
       SqlBasicFunction.create(SqlKind.ARRAY_UNION,
           ReturnTypes.LEAST_RESTRICTIVE,
diff --git a/site/_docs/reference.md b/site/_docs/reference.md
index 811cddc025..5891735399 100644
--- a/site/_docs/reference.md
+++ b/site/_docs/reference.md
@@ -2781,28 +2781,28 @@ ### Dialect-specific Operators
 | p | ACOSD(numeric)                                 | Returns the inverse 
cosine of *numeric* in degrees as a double. Returns NaN if *numeric* is NaN. 
Fails if *numeric* is less than -1.0 or greater than 1.0.
 | * | ACOSH(numeric)                                 | Returns the inverse 
hyperbolic cosine of *numeric*
 | o s | ADD_MONTHS(date, numMonths)                  | Returns the date that 
is *numMonths* after *date*
-| s | ARRAY([expr [, expr ]*])                       | Construct an array in 
Apache Spark. The function allows users to use `ARRAY()` to create an empty 
array
+| h s | ARRAY([expr [, expr ]*])                     | Construct an array in 
Apache Spark. The function allows users to use `ARRAY()` to create an empty 
array
 | s | ARRAY_APPEND(array, element)                   | Appends an *element* to 
the end of the *array* and returns the result. Type of *element* should be 
similar to type of the elements of the *array*. If the *array* is null, the 
function will return null. If an *element* that is null, the null *element* 
will be added to the end of the *array*
 | s | ARRAY_COMPACT(array)                           | Removes null values 
from the *array*
 | b | ARRAY_CONCAT(array [, array ]*)                | Concatenates one or 
more arrays. If any input argument is `NULL` the function returns `NULL`
 | s | ARRAY_CONTAINS(array, element)                 | Returns true if the 
*array* contains the *element*
-| s | ARRAY_DISTINCT(array)                          | Removes duplicate 
values from the *array* that keeps ordering of elements
-| s | ARRAY_EXCEPT(array1, array2)                   | Returns an array of the 
elements in *array1* but not in *array2*, without duplicates
+| h s | ARRAY_DISTINCT(array)                        | Removes duplicate 
values from the *array*, keeping the ordering of elements
+| h s | ARRAY_EXCEPT(array1, array2)                 | Returns an array of the 
elements in *array1* but not in *array2*, without duplicates
 | s | ARRAY_INSERT(array, pos, element)              | Places *element* into 
index *pos* of *array*. Array index start at 1, or start from the end if index 
is negative. Index above array size appends the array, or prepends the array if 
index is negative, with `NULL` elements.
-| s | ARRAY_INTERSECT(array1, array2)                | Returns an array of the 
elements in the intersection of *array1* and *array2*, without duplicates
-| s | ARRAY_JOIN(array, delimiter [, nullText ])     | Synonym for 
`ARRAY_TO_STRING`
+| h s | ARRAY_INTERSECT(array1, array2)              | Returns an array of the 
elements in the intersection of *array1* and *array2*, without duplicates
+| h s | ARRAY_JOIN(array, delimiter [, nullText ])   | Synonym for 
`ARRAY_TO_STRING`
 | b | ARRAY_LENGTH(array)                            | Synonym for 
`CARDINALITY`
-| s | ARRAY_MAX(array)                               | Returns the maximum 
value in the *array*
-| s | ARRAY_MIN(array)                               | Returns the minimum 
value in the *array*
+| h s | ARRAY_MAX(array)                             | Returns the maximum 
value in the *array*
+| h s | ARRAY_MIN(array)                             | Returns the minimum 
value in the *array*
 | s | ARRAY_POSITION(array, element)                 | Returns the (1-based) 
index of the first *element* of the *array* as long
-| s | ARRAY_REMOVE(array, element)                   | Remove all elements 
that equal to *element* from the *array*
+| h s | ARRAY_REMOVE(array, element)                 | Removes all elements 
equal to *element* from the *array*
 | s | ARRAY_PREPEND(array, element)                  | Appends an *element* to 
the beginning of the *array* and returns the result. Type of *element* should 
be similar to type of the elements of the *array*. If the *array* is null, the 
function will return null. If an *element* that is null, the null *element* 
will be added to the beginning of the *array*
 | s | ARRAY_REPEAT(element, count)                   | Returns the array 
containing element count times.
 | b | ARRAY_REVERSE(array)                           | Reverses elements of 
*array*
 | s | ARRAY_SIZE(array)                              | Synonym for 
`CARDINALITY`
 | h | ARRAY_SLICE(array, start, length)              | Returns the subset or 
range of elements.
 | b | ARRAY_TO_STRING(array, delimiter [, nullText ])| Returns a concatenation 
of the elements in *array* as a STRING and take *delimiter* as the delimiter. 
If the *nullText* parameter is used, the function replaces any `NULL` values in 
the array with the value of *nullText*. If the *nullText* parameter is not 
used, the function omits the `NULL` value and its preceding delimiter. Returns 
`NULL` if any argument is `NULL`
-| s | ARRAY_UNION(array1, array2)                    | Returns an array of the 
elements in the union of *array1* and *array2*, without duplicates
+| h s | ARRAY_UNION(array1, array2)                  | Returns an array of the 
elements in the union of *array1* and *array2*, without duplicates
 | s | ARRAYS_OVERLAP(array1, array2)                 | Returns true if *array1 
contains at least a non-null element present also in *array2*. If the arrays 
have no common element and they are both non-empty and either of them contains 
a null element null is returned, false otherwise
 | s | ARRAYS_ZIP(array [, array ]*)                  | Returns a merged 
*array* of structs in which the N-th struct contains all N-th values of input 
arrays
 | s | SORT_ARRAY(array [, ascendingOrder])           | Sorts the *array* in 
ascending or descending order according to the natural ordering of the array 
elements. The default order is ascending if *ascendingOrder* is not specified. 
Null elements will be placed at the beginning of the returned array in 
ascending order or at the end of the returned array in descending order
diff --git a/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java 
b/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
index 9bd07895d5..2abf8b0e0d 100644
--- a/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
+++ b/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
@@ -8064,144 +8064,156 @@ void checkRegexpExtract(SqlOperatorFixture f0, 
FunctionAlias functionAlias) {
 
   /** Tests {@code ARRAY_CONTAINS} function from Spark. */
   @Test void testArrayContainsFunc() {
-    final SqlOperatorFixture f0 = fixture();
-    f0.setFor(SqlLibraryOperators.ARRAY_CONTAINS);
+    final SqlOperatorFixture f0 = 
fixture().setFor(SqlLibraryOperators.ARRAY_CONTAINS);
     f0.checkFails("^array_contains(array[1, 2], 1)^",
         "No match found for function signature "
             + "ARRAY_CONTAINS\\(<INTEGER ARRAY>, <NUMERIC>\\)", false);
+    final List<SqlLibrary> libraries =
+        ImmutableList.of(SqlLibrary.SPARK);
 
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_contains(array[1, 2], 1)", true,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array[1], 1)", true,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array(), 1)", false,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array[array[1, 2], array[3, 4]], array[1, 
2])", true,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array[map[1, 'a'], map[2, 'b']], map[1, 
'a'])", true,
-        "BOOLEAN NOT NULL");
-    f.checkNull("array_contains(cast(null as integer array), 1)");
-    f.checkType("array_contains(cast(null as integer array), 1)", "BOOLEAN");
-    // Flink and Spark differ on the following. The expression
-    //   array_contains(array[1, null], cast(null as integer))
-    // returns TRUE in Flink, and returns UNKNOWN in Spark. The current
-    // function has Spark behavior, but if we supported a Flink function
-    // library (i.e. "fun=flink") we could add a function with Flink behavior.
-    f.checkNull("array_contains(array[1, null], cast(null as integer))");
-    f.checkType("array_contains(array[1, null], cast(null as integer))", 
"BOOLEAN");
-    f.checkFails("^array_contains(array[1, 2], true)^",
-        "INTEGER is not comparable to BOOLEAN", false);
-
-    // check null without cast
-    f.checkNull("array_contains(array[1, 2], null)");
-    f.checkFails("array_contains(^null^, array[1, 2])", "Illegal use of 
'NULL'", false);
-    f.checkFails("array_contains(^null^, null)", "Illegal use of 'NULL'", 
false);
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_contains(array[1, 2], 1)", true,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array[1], 1)", true,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array(), 1)", false,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array[array[1, 2], array[3, 4]], array[1, 
2])", true,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array[map[1, 'a'], map[2, 'b']], map[1, 
'a'])", true,
+          "BOOLEAN NOT NULL");
+      f.checkNull("array_contains(cast(null as integer array), 1)");
+      f.checkType("array_contains(cast(null as integer array), 1)", "BOOLEAN");
+      // Flink and Spark differ on the following. The expression
+      //   array_contains(array[1, null], cast(null as integer))
+      // returns TRUE in Flink, and returns UNKNOWN in Spark. The current
+      // function has Spark behavior, but if we supported a Flink function
+      // library (i.e. "fun=flink") we could add a function with Flink 
behavior.
+      f.checkNull("array_contains(array[1, null], cast(null as integer))");
+      f.checkType("array_contains(array[1, null], cast(null as integer))", 
"BOOLEAN");
+      f.checkFails("^array_contains(array[1, 2], true)^",
+          "INTEGER is not comparable to BOOLEAN", false);
+
+      // check null without cast
+      f.checkNull("array_contains(array[1, 2], null)");
+      f.checkFails("array_contains(^null^, array[1, 2])", "Illegal use of 
'NULL'", false);
+      f.checkFails("array_contains(^null^, null)", "Illegal use of 'NULL'", 
false);
+    };
+    f0.forEachLibrary(libraries, consumer);
   }
 
-  /** Tests {@code ARRAY_DISTINCT} function from Spark. */
+  /** Tests {@code ARRAY_DISTINCT} function from Spark, Hive. */
   @Test void testArrayDistinctFunc() {
-    final SqlOperatorFixture f0 = fixture();
-    f0.setFor(SqlLibraryOperators.ARRAY_DISTINCT);
+    final SqlOperatorFixture f0 = 
fixture().setFor(SqlLibraryOperators.ARRAY_DISTINCT);
+    final List<SqlLibrary> libraries =
+        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
     f0.checkFails("^array_distinct(array['foo'])^",
         "No match found for function signature ARRAY_DISTINCT\\(<CHAR\\(3\\) 
ARRAY>\\)", false);
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_distinct(array[1, 2, 2, 1])", "[1, 2]",
-        "INTEGER NOT NULL ARRAY NOT NULL");
-    f.checkScalar("array_distinct(array[null, 1, null])", "[null, 1]",
-        "INTEGER ARRAY NOT NULL");
-    f.checkNull("array_distinct(null)");
-    // elements cast
-    f.checkScalar("array_distinct(array[null, cast(1 as tinyint), 1, cast(2 as 
smallint)])",
-        "[null, 1, 2]", "INTEGER ARRAY NOT NULL");
-    f.checkScalar("array_distinct(array[null, cast(1 as tinyint), 1, cast(2 as 
bigint)])",
-        "[null, 1, 2]", "BIGINT ARRAY NOT NULL");
-    f.checkScalar("array_distinct(array[null, cast(1 as tinyint), 1, cast(2 as 
decimal)])",
-        "[null, 1, 2]", "DECIMAL(19, 0) ARRAY NOT NULL");
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_distinct(array[1, 2, 2, 1])", "[1, 2]",
+          "INTEGER NOT NULL ARRAY NOT NULL");
+      f.checkScalar("array_distinct(array[null, 1, null])", "[null, 1]",
+          "INTEGER ARRAY NOT NULL");
+      f.checkNull("array_distinct(null)");
+      // elements cast
+      f.checkScalar("array_distinct(array[null, cast(1 as tinyint), 1, cast(2 
as smallint)])",
+          "[null, 1, 2]", "INTEGER ARRAY NOT NULL");
+      f.checkScalar("array_distinct(array[null, cast(1 as tinyint), 1, cast(2 
as bigint)])",
+          "[null, 1, 2]", "BIGINT ARRAY NOT NULL");
+      f.checkScalar("array_distinct(array[null, cast(1 as tinyint), 1, cast(2 
as decimal)])",
+          "[null, 1, 2]", "DECIMAL(19, 0) ARRAY NOT NULL");
+    };
+    f0.forEachLibrary(libraries, consumer);
   }
 
   @Test void testArrayJoinFunc() {
-    final SqlOperatorFixture f0 = fixture();
-    f0.setFor(SqlLibraryOperators.ARRAY_JOIN);
+    final SqlOperatorFixture f0 = 
fixture().setFor(SqlLibraryOperators.ARRAY_JOIN);
     f0.checkFails("^array_join(array['aa', 'b', 'c'], '-')^", "No match found 
for function"
         + " signature ARRAY_JOIN\\(<CHAR\\(2\\) ARRAY>, <CHARACTER>\\)", 
false);
+    final List<SqlLibrary> libraries =
+        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_join(array['aa', 'b', 'c'], '-')", "aa-b -c ",
+          "VARCHAR NOT NULL");
+      f.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-', 
'empty')",
+          "empty-aa-empty-b -empty", "VARCHAR NOT NULL");
+      f.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-')", 
"aa-b ",
+          "VARCHAR NOT NULL");
+      f.checkScalar("array_join(array[null, x'aa', null, x'bb', null], '-')", 
"aa-bb",
+          "VARCHAR NOT NULL");
+      f.checkScalar("array_join(array['', 'b'], '-')", " -b", "VARCHAR NOT 
NULL");
+      f.checkScalar("array_join(array['', ''], '-')", "-", "VARCHAR NOT NULL");
 
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_join(array['aa', 'b', 'c'], '-')", "aa-b -c ",
-        "VARCHAR NOT NULL");
-    f.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-', 
'empty')",
-        "empty-aa-empty-b -empty", "VARCHAR NOT NULL");
-    f.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-')", "aa-b 
",
-        "VARCHAR NOT NULL");
-    f.checkScalar("array_join(array[null, x'aa', null, x'bb', null], '-')", 
"aa-bb",
-        "VARCHAR NOT NULL");
-    f.checkScalar("array_join(array['', 'b'], '-')", " -b", "VARCHAR NOT 
NULL");
-    f.checkScalar("array_join(array['', ''], '-')", "-", "VARCHAR NOT NULL");
-
-    final SqlOperatorFixture f1 =
-        f.withConformance(SqlConformanceEnum.PRAGMATIC_2003);
-    f1.checkScalar("array_join(array['aa', 'b', 'c'], '-')", "aa-b-c",
-        "VARCHAR NOT NULL");
-    f1.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-', 
'empty')",
-        "empty-aa-empty-b-empty", "VARCHAR NOT NULL");
-    f1.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-')", 
"aa-b",
-        "VARCHAR NOT NULL");
-    f1.checkScalar("array_join(array[null, x'aa', null, x'bb', null], '-')", 
"aa-bb",
-        "VARCHAR NOT NULL");
-    f1.checkScalar("array_join(array['', 'b'], '-')", "-b", "VARCHAR NOT 
NULL");
-    f1.checkScalar("array_join(array['', ''], '-')", "-", "VARCHAR NOT NULL");
-
-    f.checkNull("array_join(null, '-')");
-    f.checkNull("array_join(array['a', 'b', null], null)");
-    f.checkFails("^array_join(array[1, 2, 3], '-', ' ')^",
-        "Cannot apply 'ARRAY_JOIN' to arguments of type 'ARRAY_JOIN\\("
-            + "<INTEGER ARRAY>, <CHAR\\(1\\)>, <CHAR\\(1\\)>\\)'\\. Supported 
form\\(s\\):"
-            + " ARRAY_JOIN\\(<STRING ARRAY>, <CHARACTER>\\[, 
<CHARACTER>\\]\\)", false);
+      final SqlOperatorFixture f1 =
+          f.withConformance(SqlConformanceEnum.PRAGMATIC_2003);
+      f1.checkScalar("array_join(array['aa', 'b', 'c'], '-')", "aa-b-c",
+          "VARCHAR NOT NULL");
+      f1.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-', 
'empty')",
+          "empty-aa-empty-b-empty", "VARCHAR NOT NULL");
+      f1.checkScalar("array_join(array[null, 'aa', null, 'b', null], '-')", 
"aa-b",
+          "VARCHAR NOT NULL");
+      f1.checkScalar("array_join(array[null, x'aa', null, x'bb', null], '-')", 
"aa-bb",
+          "VARCHAR NOT NULL");
+      f1.checkScalar("array_join(array['', 'b'], '-')", "-b", "VARCHAR NOT 
NULL");
+      f1.checkScalar("array_join(array['', ''], '-')", "-", "VARCHAR NOT 
NULL");
+
+      f.checkNull("array_join(null, '-')");
+      f.checkNull("array_join(array['a', 'b', null], null)");
+      f.checkFails("^array_join(array[1, 2, 3], '-', ' ')^",
+          "Cannot apply 'ARRAY_JOIN' to arguments of type 'ARRAY_JOIN\\("
+              + "<INTEGER ARRAY>, <CHAR\\(1\\)>, <CHAR\\(1\\)>\\)'\\. 
Supported form\\(s\\):"
+              + " ARRAY_JOIN\\(<STRING ARRAY>, <CHARACTER>\\[, 
<CHARACTER>\\]\\)", false);
+    };
+    f0.forEachLibrary(libraries, consumer);
   }
 
-  /** Tests {@code ARRAY_MAX} function from Spark. */
+  /** Tests {@code ARRAY_MAX} function from Spark, Hive. */
   @Test void testArrayMaxFunc() {
-    final SqlOperatorFixture f0 = fixture();
-    f0.setFor(SqlLibraryOperators.ARRAY_MAX);
+    final SqlOperatorFixture f0 = 
fixture().setFor(SqlLibraryOperators.ARRAY_MAX);
     f0.checkFails("^array_max(array[1, 2])^",
         "No match found for function signature ARRAY_MAX\\(<INTEGER 
ARRAY>\\)", false);
-
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_max(array[1, 2])", "2", "INTEGER");
-    f.checkScalar("array_max(array[1, 2, null])", "2", "INTEGER");
-    f.checkScalar("array_max(array[1])", "1", "INTEGER");
-    f.checkType("array_max(array())", "UNKNOWN");
-    f.checkNull("array_max(array())");
-    f.checkNull("array_max(cast(null as integer array))");
-    // elements cast
-    f.checkScalar("array_max(array[null, 1, cast(2 as tinyint)])", "2",
-        "INTEGER");
-    f.checkScalar("array_max(array[null, 1, cast(2 as bigint)])", "2",
-        "BIGINT");
-    f.checkScalar("array_max(array[null, 1, cast(2 as decimal)])", "2",
-        "DECIMAL(19, 0)");
+    final List<SqlLibrary> libraries =
+        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_max(array[1, 2])", "2", "INTEGER");
+      f.checkScalar("array_max(array[1, 2, null])", "2", "INTEGER");
+      f.checkScalar("array_max(array[1])", "1", "INTEGER");
+      f.checkType("array_max(array())", "UNKNOWN");
+      f.checkNull("array_max(array())");
+      f.checkNull("array_max(cast(null as integer array))");
+      // elements cast
+      f.checkScalar("array_max(array[null, 1, cast(2 as tinyint)])", "2",
+          "INTEGER");
+      f.checkScalar("array_max(array[null, 1, cast(2 as bigint)])", "2",
+          "BIGINT");
+      f.checkScalar("array_max(array[null, 1, cast(2 as decimal)])", "2",
+          "DECIMAL(19, 0)");
+    };
+    f0.forEachLibrary(libraries, consumer);
   }
 
-  /** Tests {@code ARRAY_MIN} function from Spark. */
+  /** Tests {@code ARRAY_MIN} function from Spark, Hive. */
   @Test void testArrayMinFunc() {
-    final SqlOperatorFixture f0 = fixture();
-    f0.setFor(SqlLibraryOperators.ARRAY_MIN);
+    final SqlOperatorFixture f0 = 
fixture().setFor(SqlLibraryOperators.ARRAY_MIN);
     f0.checkFails("^array_min(array[1, 2])^",
         "No match found for function signature ARRAY_MIN\\(<INTEGER 
ARRAY>\\)", false);
-
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_min(array[1, 2])", "1", "INTEGER");
-    f.checkScalar("array_min(array[1, 2, null])", "1", "INTEGER");
-    f.checkType("array_min(array())", "UNKNOWN");
-    f.checkNull("array_min(array())");
-    f.checkNull("array_min(cast(null as integer array))");
-    // elements cast
-    f.checkScalar("array_min(array[null, 1, cast(2 as tinyint)])", "1",
-        "INTEGER");
-    f.checkScalar("array_min(array[null, 1, cast(2 as bigint)])", "1",
-        "BIGINT");
-    f.checkScalar("array_min(array[null, 1, cast(2 as decimal)])", "1",
-        "DECIMAL(19, 0)");
+    final List<SqlLibrary> libraries =
+        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_min(array[1, 2])", "1", "INTEGER");
+      f.checkScalar("array_min(array[1, 2, null])", "1", "INTEGER");
+      f.checkType("array_min(array())", "UNKNOWN");
+      f.checkNull("array_min(array())");
+      f.checkNull("array_min(cast(null as integer array))");
+      // elements cast
+      f.checkScalar("array_min(array[null, 1, cast(2 as tinyint)])", "1",
+          "INTEGER");
+      f.checkScalar("array_min(array[null, 1, cast(2 as bigint)])", "1",
+          "BIGINT");
+      f.checkScalar("array_min(array[null, 1, cast(2 as decimal)])", "1",
+          "DECIMAL(19, 0)");
+    };
+    f0.forEachLibrary(libraries, consumer);
   }
 
   /** Test case for
@@ -8339,43 +8351,45 @@ void checkRegexpExtract(SqlOperatorFixture f0, 
FunctionAlias functionAlias) {
         "BIGINT ARRAY NOT NULL");
   }
 
-  /** Tests {@code ARRAY_REMOVE} function from Spark. */
+  /** Tests {@code ARRAY_REMOVE} function from Spark, Hive. */
   @Test void testArrayRemoveFunc() {
-    final SqlOperatorFixture f0 = fixture();
-    f0.setFor(SqlLibraryOperators.ARRAY_REMOVE);
+    final SqlOperatorFixture f0 = 
fixture().setFor(SqlLibraryOperators.ARRAY_REMOVE);
+    final List<SqlLibrary> libraries =
+        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
     f0.checkFails("^array_remove(array[1], 1)^",
         "No match found for function signature ARRAY_REMOVE\\("
             + "<INTEGER ARRAY>, <NUMERIC>\\)", false);
-
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_remove(array[1], 1)", "[]",
-        "INTEGER NOT NULL ARRAY NOT NULL");
-    f.checkScalar("array_remove(array[1, 2, 1], 1)", "[2]",
-        "INTEGER NOT NULL ARRAY NOT NULL");
-    f.checkScalar("array_remove(array[1, 2, null], 1)", "[2, null]",
-        "INTEGER ARRAY NOT NULL");
-    f.checkScalar("array_remove(array[1, 2, null], 3)", "[1, 2, null]",
-        "INTEGER ARRAY NOT NULL");
-    f.checkScalar("array_remove(array(null), 1)", "[null]",
-        "NULL ARRAY NOT NULL");
-    f.checkScalar("array_remove(array(), 1)", "[]",
-        "UNKNOWN NOT NULL ARRAY NOT NULL");
-    f.checkScalar("array_remove(array[array[1, 2]], array[1, 2])", "[]",
-        "INTEGER NOT NULL ARRAY NOT NULL ARRAY NOT NULL");
-    f.checkScalar("array_remove(array[map[1, 'a']], map[1, 'a'])", "[]",
-        "(INTEGER NOT NULL, CHAR(1) NOT NULL) MAP NOT NULL ARRAY NOT NULL");
-    f.checkNull("array_remove(cast(null as integer array), 1)");
-    f.checkType("array_remove(cast(null as integer array), 1)", "INTEGER NOT 
NULL ARRAY");
-
-    // Flink and Spark differ on the following. The expression
-    //   array_remove(array[1, null], cast(null as integer))
-    // returns [1] in Flink, and returns null in Spark. The current
-    // function has Spark behavior, but if we supported a Flink function
-    // library (i.e. "fun=flink") we could add a function with Flink behavior.
-    f.checkNull("array_remove(array[1, null], cast(null as integer))");
-    f.checkType("array_remove(array[1, null], cast(null as integer))", 
"INTEGER ARRAY");
-    f.checkFails("^array_remove(array[1, 2], true)^",
-        "INTEGER is not comparable to BOOLEAN", false);
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_remove(array[1], 1)", "[]",
+          "INTEGER NOT NULL ARRAY NOT NULL");
+      f.checkScalar("array_remove(array[1, 2, 1], 1)", "[2]",
+          "INTEGER NOT NULL ARRAY NOT NULL");
+      f.checkScalar("array_remove(array[1, 2, null], 1)", "[2, null]",
+          "INTEGER ARRAY NOT NULL");
+      f.checkScalar("array_remove(array[1, 2, null], 3)", "[1, 2, null]",
+          "INTEGER ARRAY NOT NULL");
+      f.checkScalar("array_remove(array(null), 1)", "[null]",
+          "NULL ARRAY NOT NULL");
+      f.checkScalar("array_remove(array(), 1)", "[]",
+          "UNKNOWN NOT NULL ARRAY NOT NULL");
+      f.checkScalar("array_remove(array[array[1, 2]], array[1, 2])", "[]",
+          "INTEGER NOT NULL ARRAY NOT NULL ARRAY NOT NULL");
+      f.checkScalar("array_remove(array[map[1, 'a']], map[1, 'a'])", "[]",
+          "(INTEGER NOT NULL, CHAR(1) NOT NULL) MAP NOT NULL ARRAY NOT NULL");
+      f.checkNull("array_remove(cast(null as integer array), 1)");
+      f.checkType("array_remove(cast(null as integer array), 1)", "INTEGER NOT 
NULL ARRAY");
+
+      // Flink and Spark differ on the following. The expression
+      //   array_remove(array[1, null], cast(null as integer))
+      // returns [1] in Flink, and returns null in Spark. The current
+      // function has Spark behavior, but if we supported a Flink function
+      // library (i.e. "fun=flink") we could add a function with Flink 
behavior.
+      f.checkNull("array_remove(array[1, null], cast(null as integer))");
+      f.checkType("array_remove(array[1, null], cast(null as integer))", 
"INTEGER ARRAY");
+      f.checkFails("^array_remove(array[1, 2], true)^",
+          "INTEGER is not comparable to BOOLEAN", false);
+    };
+    f0.forEachLibrary(libraries, consumer);
   }
 
   /** Tests {@code ARRAY_REPEAT} function from Spark. */
@@ -8545,38 +8559,40 @@ void checkArrayReverseFunc(SqlOperatorFixture f0, 
SqlFunction function,
     f1.checkScalar("array_to_string(array['', ''], '-')", "-", "VARCHAR NOT 
NULL");
   }
 
  /** Tests {@code ARRAY_EXCEPT} function from Spark, Hive. */
  @Test void testArrayExceptFunc() {
    final SqlOperatorFixture f0 = fixture().setFor(SqlLibraryOperators.ARRAY_EXCEPT);
    // With the default (standard) operator table the function is unknown.
    f0.checkFails("^array_except(array[2, null, 3, 3], array[1, 2, null])^",
        "No match found for function signature "
            + "ARRAY_EXCEPT\\(<INTEGER ARRAY>, <INTEGER ARRAY>\\)", false);
    // The function is registered in both the Hive and Spark libraries;
    // behavior must be identical in each.
    final List<SqlLibrary> libraries =
        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
    final Consumer<SqlOperatorFixture> consumer = f -> {
      // Duplicates in the first argument are collapsed in the result.
      f.checkScalar("array_except(array[2, 3, 3], array[2])",
          "[3]", "INTEGER NOT NULL ARRAY NOT NULL");
      f.checkScalar("array_except(array[2], array[2, 3])",
          "[]", "INTEGER NOT NULL ARRAY NOT NULL");
      // A null element behaves as a distinct value and is removed when it
      // also appears in the second argument.
      f.checkScalar("array_except(array[2, null, 3, 3], array[1, 2, null])",
          "[3]", "INTEGER ARRAY NOT NULL");
      // A NULL array operand (with an explicit cast) yields NULL.
      f.checkNull("array_except(cast(null as integer array), array[1])");
      f.checkNull("array_except(array[1], cast(null as integer array))");
      f.checkNull("array_except(cast(null as integer array), cast(null as integer array))");

      // check null without cast: an untyped NULL cannot be matched against
      // the EQUIVALENT_TYPE operand checker, so validation fails.
      f.checkFails("^array_except(array[1, 2], null)^",
          "Cannot apply 'ARRAY_EXCEPT' to arguments of type 'ARRAY_EXCEPT\\(<INTEGER ARRAY>, "
              + "<NULL>\\)'\\. Supported form\\(s\\): 'ARRAY_EXCEPT\\(<EQUIVALENT_TYPE>, "
              + "<EQUIVALENT_TYPE>\\)'", false);
      f.checkFails("^array_except(null, array[1, 2])^",
          "Cannot apply 'ARRAY_EXCEPT' to arguments of type 'ARRAY_EXCEPT\\(<NULL>, "
              + "<INTEGER ARRAY>\\)'\\. Supported form\\(s\\): 'ARRAY_EXCEPT\\(<EQUIVALENT_TYPE>, "
              + "<EQUIVALENT_TYPE>\\)'", false);
      f.checkFails("^array_except(null, null)^",
          "Cannot apply 'ARRAY_EXCEPT' to arguments of type 'ARRAY_EXCEPT\\(<NULL>, "
              + "<NULL>\\)'\\. Supported form\\(s\\): 'ARRAY_EXCEPT\\(<EQUIVALENT_TYPE>, "
              + "<EQUIVALENT_TYPE>\\)'", false);
    };
    f0.forEachLibrary(libraries, consumer);
  }
 
   /** Tests {@code ARRAY_INSERT} function from Spark. */
@@ -8672,56 +8688,60 @@ void checkArrayReverseFunc(SqlOperatorFixture f0, 
SqlFunction function,
         "[1, 2, null, 3]", "INTEGER ARRAY NOT NULL");
   }
 
  /** Tests {@code ARRAY_INTERSECT} function from Spark, Hive. */
  @Test void testArrayIntersectFunc() {
    final SqlOperatorFixture f0 = fixture().setFor(SqlLibraryOperators.ARRAY_INTERSECT);
    // With the default (standard) operator table the function is unknown.
    f0.checkFails("^array_intersect(array[2, null, 2], array[1, 2, null])^",
        "No match found for function signature "
            + "ARRAY_INTERSECT\\(<INTEGER ARRAY>, <INTEGER ARRAY>\\)", false);
    // The function is registered in both the Hive and Spark libraries;
    // behavior must be identical in each.
    final List<SqlLibrary> libraries =
        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
    final Consumer<SqlOperatorFixture> consumer = f -> {
      // Duplicates in the first argument are collapsed in the result.
      f.checkScalar("array_intersect(array[2, 3, 3], array[3])",
          "[3]", "INTEGER NOT NULL ARRAY NOT NULL");
      f.checkScalar("array_intersect(array[1], array[2, 3])",
          "[]", "INTEGER NOT NULL ARRAY NOT NULL");
      // A null element present in both arguments survives the intersection.
      f.checkScalar("array_intersect(array[2, null, 2], array[1, 2, null])",
          "[2, null]", "INTEGER ARRAY NOT NULL");
      // A NULL array operand (with an explicit cast) yields NULL.
      f.checkNull("array_intersect(cast(null as integer array), array[1])");
      f.checkNull("array_intersect(array[1], cast(null as integer array))");
      f.checkNull("array_intersect(cast(null as integer array), cast(null as integer array))");

      // check null without cast: an untyped NULL cannot be matched against
      // the EQUIVALENT_TYPE operand checker, so validation fails.
      f.checkFails("^array_intersect(array[1, 2], null)^",
          "Cannot apply 'ARRAY_INTERSECT' to arguments of type 'ARRAY_INTERSECT\\(<INTEGER ARRAY>, "
              + "<NULL>\\)'\\. Supported form\\(s\\): 'ARRAY_INTERSECT\\(<EQUIVALENT_TYPE>, "
              + "<EQUIVALENT_TYPE>\\)'", false);
      f.checkFails("^array_intersect(null, array[1, 2])^",
          "Cannot apply 'ARRAY_INTERSECT' to arguments of type 'ARRAY_INTERSECT\\(<NULL>, "
              + "<INTEGER ARRAY>\\)'\\. Supported form\\(s\\): 'ARRAY_INTERSECT\\(<EQUIVALENT_TYPE>, "
              + "<EQUIVALENT_TYPE>\\)'", false);
      f.checkFails("^array_intersect(null, null)^",
          "Cannot apply 'ARRAY_INTERSECT' to arguments of type 'ARRAY_INTERSECT\\(<NULL>, "
              + "<NULL>\\)'\\. Supported form\\(s\\): 'ARRAY_INTERSECT\\(<EQUIVALENT_TYPE>, "
              + "<EQUIVALENT_TYPE>\\)'", false);
    };
    f0.forEachLibrary(libraries, consumer);
  }
-
-  /** Tests {@code ARRAY_UNION} function from Spark. */
+  /** Tests {@code ARRAY_UNION} function from Spark, Hive. */
   @Test void testArrayUnionFunc() {
-    final SqlOperatorFixture f0 = fixture();
-    f0.setFor(SqlLibraryOperators.ARRAY_UNION);
+    final SqlOperatorFixture f0 = 
fixture().setFor(SqlLibraryOperators.ARRAY_UNION);
     f0.checkFails("^array_union(array[2, null, 2], array[1, 2, null])^",
         "No match found for function signature "
             + "ARRAY_UNION\\(<INTEGER ARRAY>, <INTEGER ARRAY>\\)", false);
-
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_intersect(array[2, 3, 3], array[3])",
-        "[3]", "INTEGER NOT NULL ARRAY NOT NULL");
-    f.checkScalar("array_union(array[2, null, 2], array[1, 2, null])",
-        "[2, null, 1]", "INTEGER ARRAY NOT NULL");
-    f.checkNull("array_union(cast(null as integer array), array[1])");
-    f.checkNull("array_union(array[1], cast(null as integer array))");
-    f.checkNull("array_union(cast(null as integer array), cast(null as integer 
array))");
+    final List<SqlLibrary> libraries =
+        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_intersect(array[2, 3, 3], array[3])",
+          "[3]", "INTEGER NOT NULL ARRAY NOT NULL");
+      f.checkScalar("array_union(array[2, null, 2], array[1, 2, null])",
+          "[2, null, 1]", "INTEGER ARRAY NOT NULL");
+      f.checkNull("array_union(cast(null as integer array), array[1])");
+      f.checkNull("array_union(array[1], cast(null as integer array))");
+      f.checkNull("array_union(cast(null as integer array), cast(null as 
integer array))");
+    };
+    f0.forEachLibrary(libraries, consumer);
   }
 
   /** Tests {@code ARRAYS_OVERLAP} function from Spark. */
@@ -13098,64 +13118,68 @@ private static void 
checkArrayConcatAggFuncFails(SqlOperatorFixture t) {
     f.checkFails("^array[1, '1', true]^", "Parameters must be of the same 
type", false);
   }
 
  /** Test case for {@link SqlLibraryOperators#ARRAY} (Spark, Hive). */
  @Test void testArrayFunction() {
    final SqlOperatorFixture f = fixture();
    f.setFor(SqlLibraryOperators.ARRAY, VmName.EXPAND);
    // The function-call form ARRAY(...) is registered in both the Hive and
    // Spark libraries; behavior must be identical in each.
    final List<SqlLibrary> libraries =
        ImmutableList.of(SqlLibrary.HIVE, SqlLibrary.SPARK);
    // With the default (standard) operator table, the function-call syntax
    // "array(...)" is unknown (only the "array[...]" constructor exists).
    f.checkFails("^array()^",
        "No match found for function signature ARRAY\\(\\)", false);
    f.checkFails("^array('foo')^",
        "No match found for function signature ARRAY\\(<CHARACTER>\\)", false);
    final Consumer<SqlOperatorFixture> consumer = f2 -> {
      f2.checkScalar("array('foo')",
          "[foo]", "CHAR(3) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array('foo', 'bar')",
          "[foo, bar]", "CHAR(3) NOT NULL ARRAY NOT NULL");
      // Unlike the standard constructor, the function form allows zero
      // arguments; the element type is then UNKNOWN.
      f2.checkScalar("array()",
          "[]", "UNKNOWN NOT NULL ARRAY NOT NULL");
      // A null element makes the element type nullable.
      f2.checkScalar("array('foo', null)",
          "[foo, null]", "CHAR(3) ARRAY NOT NULL");
      f2.checkScalar("array(null, 'foo')",
          "[null, foo]", "CHAR(3) ARRAY NOT NULL");
      f2.checkScalar("array(null)",
          "[null]", "NULL ARRAY NOT NULL");
      // check complex type: element-type inference over ROW arguments,
      // including nullability propagation per field.
      f2.checkScalar("array(row(1))", "[{1}]",
          "RecordType(INTEGER NOT NULL EXPR$0) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(row(1, null))", "[{1, null}]",
          "RecordType(INTEGER NOT NULL EXPR$0, NULL EXPR$1) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(row(null, 1))", "[{null, 1}]",
          "RecordType(NULL EXPR$0, INTEGER NOT NULL EXPR$1) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(row(1, 2))", "[{1, 2}]",
          "RecordType(INTEGER NOT NULL EXPR$0, INTEGER NOT NULL EXPR$1) NOT NULL ARRAY NOT NULL");
      // A null sibling makes every field of the inferred record nullable.
      f2.checkScalar("array(row(1, 2), null)",
          "[{1, 2}, null]", "RecordType(INTEGER EXPR$0, INTEGER EXPR$1) ARRAY NOT NULL");
      f2.checkScalar("array(null, row(1, 2))",
          "[null, {1, 2}]", "RecordType(INTEGER EXPR$0, INTEGER EXPR$1) ARRAY NOT NULL");
      f2.checkScalar("array(row(1, null), row(2, null))", "[{1, null}, {2, null}]",
          "RecordType(INTEGER NOT NULL EXPR$0, NULL EXPR$1) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(row(null, 1), row(null, 2))", "[{null, 1}, {null, 2}]",
          "RecordType(NULL EXPR$0, INTEGER NOT NULL EXPR$1) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(row(1, null), row(null, 2))", "[{1, null}, {null, 2}]",
          "RecordType(INTEGER EXPR$0, INTEGER EXPR$1) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(row(null, 1), row(2, null))", "[{null, 1}, {2, null}]",
          "RecordType(INTEGER EXPR$0, INTEGER EXPR$1) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(row(1, 2), row(3, 4))", "[{1, 2}, {3, 4}]",
          "RecordType(INTEGER NOT NULL EXPR$0, INTEGER NOT NULL EXPR$1) NOT NULL ARRAY NOT NULL");
      // checkFails: records of different arity are not coercible.
      f2.checkFails("^array(row(1), row(2, 3))^",
          "Parameters must be of the same type", false);
      f2.checkFails("^array(row(1), row(2, 3), null)^",
          "Parameters must be of the same type", false);
      // calcite default cast char type will fill extra spaces
      f2.checkScalar("array(1, 2, 'Hi')",
          "[1 , 2 , Hi]", "CHAR(2) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(1, 2, 'Hi', 'Hello')",
          "[1    , 2    , Hi   , Hello]", "CHAR(5) NOT NULL ARRAY NOT NULL");
      f2.checkScalar("array(1, 2, 'Hi', null)",
          "[1 , 2 , Hi, null]", "CHAR(2) ARRAY NOT NULL");
      f2.checkScalar("array(1, 2, 'Hi', cast(null as char(10)))",
          "[1         , 2         , Hi        , null]", "CHAR(10) ARRAY NOT NULL");
    };
    f.forEachLibrary(libraries, consumer);
  }
 
   @Test void testArrayQueryConstructor() {


Reply via email to