This is an automated email from the ASF dual-hosted git repository.

csringhofer pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit 6f2d9a24d8c014a7dc1ec7a08bcfb025b3bdf41f
Author: Csaba Ringhofer <[email protected]>
AuthorDate: Wed Mar 12 18:16:54 2025 +0100

    IMPALA-13920: Allow running minicluster with Java 17
    
    IMPALA-11941 allowed building Impala and running tests with Java 17,
    but it still uses Java 8 for minicluster components (e.g. Hadoop) and
    skips several tests that would restart Hive. It should be possible to
    use 17 for everything to be able to deprecate Java 8.
    
    This patch mainly fixes Yarn+Hive+Tez startup issues with Java 17 by
    setting JAVA_TOOL_OPTIONS.
    
    Another issue fixed is KuduHMSIntegrationTest: this test fails to
    restart Kudu due to a bug in OpenJDK (see IMPALA-13856). The current
    fix is to remove LD_PRELOAD to avoid loading libjsig (similarly to
    the case when MINICLUSTER_JAVA_HOME is set). This works, but it
    would be nice to clean up this area in a future patch.
    
    Testing:
    - ran exhaustive tests with Java 17
    - ran core tests with default Java 8
    
    Change-Id: If58b64a21d14a4a55b12dfe9ea0b9c3d5fe9c9cf
    Reviewed-on: http://gerrit.cloudera.org:8080/22705
    Tested-by: Impala Public Jenkins <[email protected]>
    Reviewed-by: Riza Suminto <[email protected]>
    Reviewed-by: Michael Smith <[email protected]>
---
 bin/bootstrap_system.sh                            | 10 +++-
 bin/create-test-configuration.sh                   |  3 +
 bin/impala-config-java.sh                          |  6 ++
 bin/run-all-tests.sh                               | 47 ++-------------
 bin/set-impala-java-tool-options.sh                | 66 ++++++++++++++++++++++
 fe/pom.xml                                         |  2 +
 .../customservice/KuduHMSIntegrationTest.java      |  7 +++
 .../java/org/apache/impala/testutil/TestUtils.java | 14 +++++
 fe/src/test/resources/hive-site.xml.py             | 24 ++++----
 testdata/bin/run-hive-server.sh                    |  3 +
 testdata/bin/run-mini-dfs.sh                       |  3 +
 .../test_kudu_table_create_without_hms.py          |  2 +-
 tests/metadata/test_hms_integration.py             | 11 +++-
 13 files changed, 142 insertions(+), 56 deletions(-)

diff --git a/bin/bootstrap_system.sh b/bin/bootstrap_system.sh
index 9a9996868..ed5d44564 100755
--- a/bin/bootstrap_system.sh
+++ b/bin/bootstrap_system.sh
@@ -272,10 +272,18 @@ if [[ $ARCH_NAME == 'aarch64' ]]; then
           libncurses5-dev libreadline-dev
 fi
 
+ubuntu sudo update-java-alternatives -l || true
+
 # Configure the default Java version to be the version we selected.
-ubuntu sudo update-java-alternatives -s \
+ubuntu sudo update-java-alternatives -v -s \
     java-1.${UBUNTU_JAVA_VERSION}.0-openjdk-${UBUNTU_PACKAGE_ARCH}
 
+# update-java-alternatives may not take effect if there is a Java in PATH
+which java
+java -version
+which javac
+javac -version
+
 redhat sudo yum install -y file gawk gcc gcc-c++ git krb5-devel krb5-server \
         krb5-workstation libevent-devel libffi-devel make openssl-devel 
cyrus-sasl \
         cyrus-sasl-gssapi cyrus-sasl-devel cyrus-sasl-plain \
diff --git a/bin/create-test-configuration.sh b/bin/create-test-configuration.sh
index cb4489149..323da9669 100755
--- a/bin/create-test-configuration.sh
+++ b/bin/create-test-configuration.sh
@@ -132,6 +132,9 @@ rm -f authz-provider.ini
 # Generate hive configs first so that schemaTool can be used to init the 
metastore schema
 # if needed
 
+# Set IMPALA_JAVA_TOOL_OPTIONS to allow passing it to Tez containers.
+. $IMPALA_HOME/bin/set-impala-java-tool-options.sh
+
 $IMPALA_HOME/bin/generate_xml_config.py hive-site.xml.py hive-site.xml
 export HIVE_VARIANT=changed_external_dir
 $IMPALA_HOME/bin/generate_xml_config.py hive-site.xml.py hive-site_ext.xml
diff --git a/bin/impala-config-java.sh b/bin/impala-config-java.sh
index 276d37f85..48da4d718 100644
--- a/bin/impala-config-java.sh
+++ b/bin/impala-config-java.sh
@@ -21,9 +21,11 @@ IMPALA_JDK_VERSION=${IMPALA_JDK_VERSION:-system}
 if [[ "${IMPALA_JDK_VERSION}" == "system" || "${IMPALA_JDK_VERSION}" == "8" 
]]; then
   UBUNTU_JAVA_VERSION=8
   REDHAT_JAVA_VERSION=1.8.0
+  export IMPALA_JDK_VERSION_NUM=8
 else
   UBUNTU_JAVA_VERSION="${IMPALA_JDK_VERSION}"
   REDHAT_JAVA_VERSION="${IMPALA_JDK_VERSION}"
+  export IMPALA_JDK_VERSION_NUM="${IMPALA_JDK_VERSION}"
 fi
 
 if [[ "$(uname -p)" == 'aarch64' ]]; then
@@ -31,3 +33,7 @@ if [[ "$(uname -p)" == 'aarch64' ]]; then
 else
   UBUNTU_PACKAGE_ARCH='amd64'
 fi
+
+echo "JAVA_HOME: ${JAVA_HOME:-}"
+echo "IMPALA_JDK_VERSION: $IMPALA_JDK_VERSION"
+echo "IMPALA_JDK_VERSION_NUM: $IMPALA_JDK_VERSION_NUM"
diff --git a/bin/run-all-tests.sh b/bin/run-all-tests.sh
index 62fbdff56..91706a034 100755
--- a/bin/run-all-tests.sh
+++ b/bin/run-all-tests.sh
@@ -285,49 +285,11 @@ do
 
     # Add Jamm as javaagent for CatalogdMetaProviderTest.testWeights
     JAMM_JAR=$(compgen -G ${IMPALA_HOME}/fe/target/dependency/jamm-*.jar)
+    PREV_JAVA_TOOL_OPTIONS="${JAVA_TOOL_OPTIONS-}"
     export JAVA_TOOL_OPTIONS="${JAVA_TOOL_OPTIONS-} -javaagent:${JAMM_JAR}"
 
-    if $JAVA -version 2>&1 | grep -q -E ' version "(9|[1-9][0-9])\.'; then
-      # If running with Java 9+, add-opens to JAVA_TOOL_OPTIONS for
-      # CatalogdMetaProviderTest.testWeights with ehcache.sizeof.
-      JAVA_OPTIONS=" --add-opens=java.base/java.io=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.lang.invoke=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.lang.module=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.lang.ref=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.lang.reflect=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.lang=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.net=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.nio.charset=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=java.base/java.nio.file.attribute=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.nio=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.security=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.util.concurrent=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.util.jar=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.util.regex=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.util.zip=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/java.util=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.loader=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.math=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.module=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.perf=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.platform=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=java.base/jdk.internal.platform.cgroupv1=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.reflect=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/jdk.internal.util.jar=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/sun.nio.ch=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=java.base/sun.nio.fs=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=jdk.dynalink/jdk.dynalink.beans=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=jdk.dynalink/jdk.dynalink.linker.support=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=jdk.dynalink/jdk.dynalink.linker=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=jdk.dynalink/jdk.dynalink.support=ALL-UNNAMED"
-      JAVA_OPTIONS+=" --add-opens=jdk.dynalink/jdk.dynalink=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=jdk.management.jfr/jdk.management.jfr=ALL-UNNAMED"
-      JAVA_OPTIONS+=" 
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED"
-      export JAVA_TOOL_OPTIONS="$JAVA_OPTIONS ${JAVA_TOOL_OPTIONS-}"
-    fi
+    . $IMPALA_HOME/bin/set-impala-java-tool-options.sh
+    export JAVA_TOOL_OPTIONS="$IMPALA_JAVA_TOOL_OPTIONS ${JAVA_TOOL_OPTIONS-}"
 
     MVN_ARGS=""
     if [[ "${TARGET_FILESYSTEM}" == "s3" ]]; then
@@ -356,6 +318,9 @@ do
       start_impala_cluster
     fi
     popd
+
+    # Restore old (likely empty) JAVA_TOOL_OPTIONS to avoid polluting other 
tests.
+    export JAVA_TOOL_OPTIONS=$PREV_JAVA_TOOL_OPTIONS
   fi
 
   if [[ "$EE_TEST" == true ]]; then
diff --git a/bin/set-impala-java-tool-options.sh 
b/bin/set-impala-java-tool-options.sh
new file mode 100755
index 000000000..de1d16947
--- /dev/null
+++ b/bin/set-impala-java-tool-options.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Set JAVA_TOOL_OPTIONS needed by some Hadoop dependencies when running with 
JDK>=9
+# TODO: check if this is still needed once these deps are build with JDK17 
(IMPALA-13922)
+
+export IMPALA_JAVA_TOOL_OPTIONS="${IMPALA_JAVA_TOOL_OPTIONS:-}"
+
+if (( IMPALA_JDK_VERSION_NUM > 8 )); then
+  echo "JDK >= 9 detected, adding --add-opens to IMPALA_JAVA_TOOL_OPTIONS"
+  ADD_OPENS_OPTS=" --add-opens=java.base/java.io=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.lang.invoke=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.lang.module=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.lang.ref=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.lang.reflect=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.lang=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.net=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.nio.charset=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.nio.file.attribute=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.nio=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.security=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" 
--add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" 
--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.util.concurrent=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.util.jar=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.util.regex=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.util.zip=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/java.util=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.loader=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.math=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.module=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.perf=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.platform=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" 
--add-opens=java.base/jdk.internal.platform.cgroupv1=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.reflect=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/jdk.internal.util.jar=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/sun.nio.ch=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=java.base/sun.nio.fs=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=jdk.dynalink/jdk.dynalink.beans=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" 
--add-opens=jdk.dynalink/jdk.dynalink.linker.support=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=jdk.dynalink/jdk.dynalink.linker=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=jdk.dynalink/jdk.dynalink.support=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" --add-opens=jdk.dynalink/jdk.dynalink=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" 
--add-opens=jdk.management.jfr/jdk.management.jfr=ALL-UNNAMED"
+  ADD_OPENS_OPTS+=" 
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED"
+  export IMPALA_JAVA_TOOL_OPTIONS="$IMPALA_JAVA_TOOL_OPTIONS $ADD_OPENS_OPTS"
+fi
+
diff --git a/fe/pom.xml b/fe/pom.xml
index 3d7f06cfe..c07d78942 100644
--- a/fe/pom.xml
+++ b/fe/pom.xml
@@ -790,6 +790,8 @@ under the License.
                  * LD_LIBRARY_PATH needs to include the GCC libraries that 
libfesupport.so
                    was built against.
                  * LD_PRELOAD needs to include libjsig.so for JVM/native 
signal handling.
+                   TODO(IMPALA-13856): remove libjsig.so if possible as it 
causes issues
+                                       with Java 17
             -->
             <LD_LIBRARY_PATH>
               
${env.IMPALA_TOOLCHAIN_PACKAGES_HOME}/gcc-${env.IMPALA_GCC_VERSION}/lib64
diff --git 
a/fe/src/test/java/org/apache/impala/customservice/KuduHMSIntegrationTest.java 
b/fe/src/test/java/org/apache/impala/customservice/KuduHMSIntegrationTest.java
index 37a099429..f47d8fe1f 100644
--- 
a/fe/src/test/java/org/apache/impala/customservice/KuduHMSIntegrationTest.java
+++ 
b/fe/src/test/java/org/apache/impala/customservice/KuduHMSIntegrationTest.java
@@ -25,7 +25,10 @@ import org.apache.impala.analysis.AuditingKuduTest;
 import org.apache.impala.analysis.ParserTest;
 import org.apache.impala.analysis.ToSqlTest;
 import org.apache.impala.customservice.CustomServiceRunner;
+import org.apache.impala.testutil.TestUtils;
+
 import org.junit.AfterClass;
+import org.junit.Assume;
 import org.junit.BeforeClass;
 import org.junit.runner.RunWith;
 import org.junit.runners.Suite;
@@ -56,7 +59,11 @@ public class KuduHMSIntegrationTest {
       envp.removeIf(s -> s.startsWith("JAVA_HOME="));
       envp.add("JAVA=" + altJavaHome + "/bin/java");
       envp.add("JAVA_HOME=" + altJavaHome);
+    } else if (TestUtils.getJavaMajorVersion() >= 17) {
+      // Skip loading libjsig (IMPALA-13856).
+      envp.removeIf(s -> s.startsWith("LD_PRELOAD="));
     }
+
     int exitVal = CustomServiceRunner.RestartMiniclusterComponent(
         "kudu", envp.toArray(new String[envp.size()]));
     assertEquals(0, exitVal);
diff --git a/fe/src/test/java/org/apache/impala/testutil/TestUtils.java 
b/fe/src/test/java/org/apache/impala/testutil/TestUtils.java
index eba1e68d3..665a1b97d 100644
--- a/fe/src/test/java/org/apache/impala/testutil/TestUtils.java
+++ b/fe/src/test/java/org/apache/impala/testutil/TestUtils.java
@@ -485,4 +485,18 @@ public class TestUtils {
   public static String getRandomString(int size) {
     return RandomStringUtils.randomAlphanumeric(size);
   }
+
+  public static int getJavaMajorVersion() {
+    String version = System.getProperty("java.version");
+    if(version.startsWith("1.")) {
+        version = version.substring(2, 3);
+    } else {
+        int dot = version.indexOf(".");
+        if (dot != -1) { version = version.substring(0, dot); }
+    }
+    int result = Integer.parseInt(version);
+    // Impala tests shouldn't see java version < 8.
+    Preconditions.checkState(result >= 8);
+    return result;
+  }
 }
diff --git a/fe/src/test/resources/hive-site.xml.py 
b/fe/src/test/resources/hive-site.xml.py
index 3b241013d..6f5aa04ed 100644
--- a/fe/src/test/resources/hive-site.xml.py
+++ b/fe/src/test/resources/hive-site.xml.py
@@ -20,9 +20,10 @@
 from __future__ import absolute_import, division, print_function
 import os
 
-hive_major_version = int(os.environ['IMPALA_HIVE_VERSION'][0])
-kerberize = os.environ.get('IMPALA_KERBERIZE') == 'true'
-variant = os.environ.get('HIVE_VARIANT')
+HIVE_MAJOR_VERSION = int(os.environ['IMPALA_HIVE_VERSION'][0])
+KERBERIZE = os.environ.get('IMPALA_KERBERIZE') == 'true'
+VARIANT = os.environ.get('HIVE_VARIANT')
+IMPALA_JAVA_TOOL_OPTIONS=os.environ.get("IMPALA_JAVA_TOOL_OPTIONS")
 
 CONFIG = {
   'dfs.replication': '3'
@@ -71,24 +72,24 @@ CONFIG.update({
   'hive.metastore.partitions.parameters.exclude.pattern': '""',
 })
 
-if variant == 'changed_external_dir':
+if VARIANT == 'changed_external_dir':
   CONFIG.update({
     'hive.metastore.warehouse.external.dir': 
'${WAREHOUSE_LOCATION_PREFIX}/test-warehouse-external',
   })
-elif variant == 'ranger_auth':
+elif VARIANT == 'ranger_auth':
   CONFIG.update({
     'hive.security.authorization.manager':
         
'org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizerFactory',
     'hive.metastore.pre.event.listeners':
         
'org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizer',
   })
-elif variant == 'events_cleanup':
+elif VARIANT == 'events_cleanup':
   # HMS configs needed for regression test for IMPALA-11028
   CONFIG.update({
     'hive.metastore.event.db.listener.timetolive': '60s',
     'hive.metastore.event.db.listener.clean.interval': '10s'
   })
-elif variant == 'housekeeping_on':
+elif VARIANT == 'housekeeping_on':
   # HMS configs needed for regression test for IMPALA-12827
   CONFIG.update({
     'hive.metastore.housekeeping.threads.on': 'true',
@@ -100,7 +101,7 @@ CONFIG.update({
   'hive.cluster.delegation.token.store.zookeeper.connectString': 
'${INTERNAL_LISTEN_HOST}:2181',
 })
 
-if kerberize:
+if KERBERIZE:
   CONFIG.update({
    'hive.server2.authentication.kerberos.keytab': '${KRB5_KTNAME}',
    'hive.server2.authentication.kerberos.principal': '${MINIKDC_PRINC_HIVE}',
@@ -119,7 +120,7 @@ if kerberize:
   #   hive.metastore.kerberos.principal
 
 # Enable Tez, ACID and proleptic Gregorian calendar DATE types for Hive 3
-if hive_major_version >= 3:
+if HIVE_MAJOR_VERSION >= 3:
   CONFIG.update({
    'hive.execution.engine': 'tez',
    'hive.tez.container.size': '512',
@@ -129,6 +130,9 @@ if hive_major_version >= 3:
    'tez.use.cluster.hadoop-libs': 'true',
    'tez.am.tez-ui.webservice.port-range': '32000-32100',
 
+   'tez.task.launch.cluster-default.cmd-opts': IMPALA_JAVA_TOOL_OPTIONS,
+   'tez.am.launch.cluster-default.cmd-opts': IMPALA_JAVA_TOOL_OPTIONS,
+
    # Some of the tests change the columns in a incompatible manner
    # (eg. string to timestamp) this is disallowed by default in Hive-3 which 
causes
    # these tests to fail. We disable this behavior in minicluster to keep 
running the
@@ -211,7 +215,7 @@ CONFIG.update({
   'iceberg.catalog.ice_hadoop_cat.warehouse': 
'${WAREHOUSE_LOCATION_PREFIX}/test-warehouse/ice_hadoop_cat',
 })
 
-if variant == 'without_hms_config':
+if VARIANT == 'without_hms_config':
   CONFIG.clear()
 
 # Database and JDO-related configs:
diff --git a/testdata/bin/run-hive-server.sh b/testdata/bin/run-hive-server.sh
index b4970e0da..04564132a 100755
--- a/testdata/bin/run-hive-server.sh
+++ b/testdata/bin/run-hive-server.sh
@@ -34,6 +34,9 @@ RESTART_SERVICE=1
 
 CLUSTER_BIN=${IMPALA_HOME}/testdata/bin
 
+. $IMPALA_HOME/bin/set-impala-java-tool-options.sh
+export JAVA_TOOL_OPTIONS="$IMPALA_JAVA_TOOL_OPTIONS ${JAVA_TOOL_OPTIONS-}"
+
 if ${CLUSTER_DIR}/admin is_kerberized; then
     # Making a kerberized cluster... set some more environment variables.
     . ${MINIKDC_ENV}
diff --git a/testdata/bin/run-mini-dfs.sh b/testdata/bin/run-mini-dfs.sh
index be63715ef..d6e19431c 100755
--- a/testdata/bin/run-mini-dfs.sh
+++ b/testdata/bin/run-mini-dfs.sh
@@ -31,6 +31,9 @@ else
   SHOULD_FORMAT=false
 fi
 
+. $IMPALA_HOME/bin/set-impala-java-tool-options.sh
+export JAVA_TOOL_OPTIONS="$IMPALA_JAVA_TOOL_OPTIONS ${JAVA_TOOL_OPTIONS-}"
+
 # Kill and clean data for a clean start.
 $IMPALA_HOME/testdata/bin/kill-mini-dfs.sh
 
diff --git a/tests/custom_cluster/test_kudu_table_create_without_hms.py 
b/tests/custom_cluster/test_kudu_table_create_without_hms.py
index 1b3084ee8..24f78a867 100644
--- a/tests/custom_cluster/test_kudu_table_create_without_hms.py
+++ b/tests/custom_cluster/test_kudu_table_create_without_hms.py
@@ -30,7 +30,7 @@ TBL_NAME = "test_kudu_table_create_without_hms"
 class TestCreatingKuduTableWithoutHMS(CustomClusterTestSuite):
   """Test creating kudu managed table without hms"""
 
-  @SkipIfHive3.without_hms_not_supported
+  @SkipIfHive3.without_hms_not_supported  # TODO: will we ever support this?
   @SkipIf.is_test_jdk
   @pytest.mark.execute_serially
   @CustomClusterTestSuite.with_args(hive_conf_dir=HIVE_SITE_WITHOUT_HMS_DIR)
diff --git a/tests/metadata/test_hms_integration.py 
b/tests/metadata/test_hms_integration.py
index 42b02884a..e7fe4e141 100644
--- a/tests/metadata/test_hms_integration.py
+++ b/tests/metadata/test_hms_integration.py
@@ -25,6 +25,7 @@
 
 from __future__ import absolute_import, division, print_function
 from builtins import range
+import os
 import pytest
 import random
 import string
@@ -788,10 +789,14 @@ class TestHmsIntegration(ImpalaTestSuite):
         # Modify HMS table metadata again, change the type of column 'y' back 
to INT.
         self.run_stmt_in_hive('alter table %s change y y int' % table_name)
         # Neither Hive 2 and 3, nor Impala converts STRINGs to INTs implicitly.
+        err_msg = ("{0}org.apache.hadoop.io.Text cannot be "
+                  "cast to {0}org.apache.hadoop.io.IntWritable")
+        # The error message is different in newer Javas than in 17
+        # TODO: find out which version changed it exactly
+        err_msg = err_msg.format(
+            "class " if os.environ.get('IMPALA_JDK_VERSION_NUM') >= 17 else "")
         self.assert_sql_error(
-            self.run_stmt_in_hive, 'select * from %s' % table_name,
-            'org.apache.hadoop.io.Text cannot be '
-            'cast to org.apache.hadoop.io.IntWritable')
+            self.run_stmt_in_hive, 'select * from %s' % table_name, err_msg)
         self.client.execute('invalidate metadata %s' % table_name)
         self.assert_sql_error(
             self.client.execute, 'select * from %s' % table_name,

Reply via email to