This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-29930
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/HBASE-29930 by this push:
     new ef70239c250 fix
ef70239c250 is described below

commit ef70239c250f36ac00619cb99fc39a02bc63b643
Author: Duo Zhang <[email protected]>
AuthorDate: Sun Mar 1 12:54:32 2026 +0800

    fix
---
 .../integration-test/integration-test.Jenkinsfile  | 23 +++++++++++++---------
 .../integration-test/pseudo-distributed-test.sh    |  4 ++--
 2 files changed, 16 insertions(+), 11 deletions(-)

diff --git a/dev-support/integration-test/integration-test.Jenkinsfile 
b/dev-support/integration-test/integration-test.Jenkinsfile
index 74caa7da79e..45e4fd063a0 100644
--- a/dev-support/integration-test/integration-test.Jenkinsfile
+++ b/dev-support/integration-test/integration-test.Jenkinsfile
@@ -239,12 +239,17 @@ pipeline {
                   hbase_install_dir="hbase-hadoop3-install"
                   hbase_client_dir="hbase-hadoop3-client"
                 fi
-                echo "Attempting to use run an instance on top of Hadoop 
${HADOOP_VERSION}."
+                if [[ ${HADOOP_VERSION} == 2.* ]]; then
+                  java_home="/usr/lib/jvm/java-8"
+                else
+                  java_home="/usr/lib/jvm/java-17"
+                fi
+                echo "Attempting to run an instance on top of Hadoop 
${HADOOP_VERSION}."
                 # Create working dir
                 rm -rf "${OUTPUT_DIR}/non-shaded" && mkdir 
"${OUTPUT_DIR}/non-shaded"
                 docker build -t hbase-integration-test -f 
"${BASEDIR}/dev-support/docker/Dockerfile" .
                 docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-                  -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
+                  -u `id -u`:`id -g` -e JAVA_HOME="${java_home}" \
                   -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" 
\
                   --workdir=/hbase hbase-integration-test \
                   
component/dev-support/integration-test/pseudo-distributed-test.sh \
@@ -259,10 +264,10 @@ pipeline {
                   hadoop-install/bin/mapred \
                   >${OUTPUT_DIR}/hadoop.log 2>&1
                 if [ $? -ne 0 ]; then
-                  echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 
${HADOOP_VERSION}. [see log for 
details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop.log]. (note that this means 
we didn't check the Hadoop ${HADOOP_VERSION} shaded client)" > 
${OUTPUT_DIR}/commentfile
+                  echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 
${HADOOP_VERSION}. [see log for 
details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop.log]. (note that this means 
we didn't check the Hadoop ${HADOOP_VERSION} shaded client)" 
>${OUTPUT_DIR}/commentfile
                   exit 2
                 fi
-                echo "(/) {color:green}+1 client integration test for 
${HADOOP_VERSION} {color}" >> ${OUTPUT_DIR}/commentfile
+                echo "(/) {color:green}+1 client integration test for 
${HADOOP_VERSION} {color}" >${OUTPUT_DIR}/commentfile
                 if [[ ${HADOOP_VERSION} == 2.* ]] || [[ ${HADOOP_VERSION} == 
3.2.* ]]; then
                   echo "skip running shaded hadoop client test for 
${HADOOP_VERSION}"
                   exit 0
@@ -277,7 +282,7 @@ pipeline {
                   
component/dev-support/integration-test/pseudo-distributed-test.sh \
                   --single-process \
                   --hadoop-client-classpath 
hadoop-install/share/hadoop/client/hadoop-client-api-*.jar:hadoop-install/share/hadoop/client/hadoop-client-runtime-*.jar
 \
-                  --working-dir ${OUTPUT_DIR}/shade \
+                  --working-dir ${OUTPUT_DIR}/shaded \
                   --hbase-client-install ${hbase_client_dir} \
                   ${hbase_install_dir} \
                   hadoop-install/bin/hadoop \
@@ -310,12 +315,12 @@ pipeline {
       script {
         def results = []
         results.add('output-srctarball/commentfile')
-        for (hadoop_version in getHadoopVersions(env.HADOOP_VERSIONS)) {
+        for (hadoopVersion in getHadoopVersions(env.HADOOP_VERSIONS)) {
           try {
-            unstash "test-result-${hadoop_version}"
-            
results.add("output-integration-hadoop-${hadoop_version}/commentfile")
+            unstash "test-result-${hadoopVersion}"
+            
results.add("output-integration-hadoop-${hadoopVersion}/commentfile")
           } catch (e) {
-            echo "unstash ${hadoop-version} failed, ignore"
+            echo "unstash ${hadoopVersion} failed, ignore"
           }
         }
         echo env.BRANCH_NAME
diff --git a/dev-support/integration-test/pseudo-distributed-test.sh 
b/dev-support/integration-test/pseudo-distributed-test.sh
index 099774d25b4..3089b6db307 100755
--- a/dev-support/integration-test/pseudo-distributed-test.sh
+++ b/dev-support/integration-test/pseudo-distributed-test.sh
@@ -417,7 +417,7 @@ HADOOP_CLASSPATH="${hbase_dep_classpath}" redirect_and_run 
"${working_dir}/mr-im
 EOF
 
 echo "Verifying row count from import."
-import_rowcount=$(echo 'count "test:example"' | "${hbase_client}/bin/hbase" 
--config "${working_dir}/hbase-conf/" shell --noninteractive 2>/dev/null | tail 
-n 1)
+import_rowcount=$(echo 'count "test:example"' | "${hbase_client}/bin/hbase" 
--config "${working_dir}/hbase-conf/" shell --noninteractive 2>/dev/null | grep 
"row(s)" | awk '{print $1}')
 if [ ! "${import_rowcount}" -eq 48 ]; then
   echo "ERROR: Instead of finding 48 rows, we found ${import_rowcount}."
   exit 2
@@ -530,7 +530,7 @@ echo "Checking on results of example program."
 EOF
 
 echo "Verifying row count from example."
-example_rowcount=$(echo 'count "test:example"' | "${hbase_client}/bin/hbase" 
--config "${working_dir}/hbase-conf/" shell --noninteractive 2>/dev/null | tail 
-n 1)
+example_rowcount=$(echo 'count "test:example"' | "${hbase_client}/bin/hbase" 
--config "${working_dir}/hbase-conf/" shell --noninteractive 2>/dev/null | grep 
"row(s)" | awk '{print $1}')
 if [ "${example_rowcount}" -gt "1049" ]; then
   echo "Found ${example_rowcount} rows, which is enough to cover 48 for 
import, 1000 example's use of user table regions, 1 for example's use of meta 
region, and 1 for example's count record"
 else

Reply via email to