This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-29930
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/HBASE-29930 by this push:
     new 246117436cb add
246117436cb is described below

commit 246117436cbdfd4d69cdb9878f17468d68fddd0f
Author: Duo Zhang <[email protected]>
AuthorDate: Sun Mar 1 00:19:01 2026 +0800

    add
---
 .../integration-test/integration-test.Jenkinsfile  | 110 ++++++++++++++++++---
 1 file changed, 97 insertions(+), 13 deletions(-)

diff --git a/dev-support/integration-test/integration-test.Jenkinsfile b/dev-support/integration-test/integration-test.Jenkinsfile
index 647a30a3e69..a22cd43d764 100644
--- a/dev-support/integration-test/integration-test.Jenkinsfile
+++ b/dev-support/integration-test/integration-test.Jenkinsfile
@@ -31,9 +31,7 @@ pipeline {
     disableConcurrentBuilds()
   }
   environment {
-    YETUS_RELEASE = '0.15.0'
-    HADOOP2_VERSIONS = "2.10.2"
-    HADOOP3_VERSIONS = "3.3.5,3.3.6,3.4.0,3.4.1,3.4.2,3.4.3"
+    HADOOP_VERSIONS = "2.10.2,3.2.4,3.3.5,3.3.6,3.4.0,3.4.1,3.4.2,3.4.3"
     BASEDIR = "${env.WORKSPACE}/component"
   }
   parameters {
@@ -112,13 +110,28 @@ pipeline {
             name 'HADOOP_VERSION'
            // matrix does not support dynamic axis values, so here we need to keep align with the
             // above environment
-            values "2.10.2","3.3.5","3.3.6","3.4.0","3.4.1","3.4.2","3.4.3"
+            values "2.10.2","3.2.4","3.3.5","3.3.6","3.4.0","3.4.1","3.4.2","3.4.3"
           }
         }
         environment {
           BASEDIR = "${env.WORKSPACE}/component"
           OUTPUT_DIR = "output-integration-hadoop-${env.HADOOP_VERSION}"
         }
+        when {
+          expression {
+            if (HADOOP_VERSION == '2.10.2') {
+              // only branch-2/branch-2.x need to run against hadoop2, here we also includes
+              // HBASE-XXXXX-branch-2 feature branch
+              return env.BRANCH_NAME.contains('branch-2')
+            }
+            if (HADOOP_VERSION == '3.2.4') {
+              // only branch-2.5 need to run against hadoop 3.2.4, here we also includes
+              // HBASE-XXXXX-branch-2.5 feature branch
+              return env.BRANCH_NAME.contains('branch-2.5')
+            }
+            return true
+          }
+        }
         stages {
           stage('scm-checkout') {
             steps {
@@ -127,8 +140,8 @@ pipeline {
                 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
                echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >${OUTPUT_DIR}/commentfile
                 rm -rf "unpacked_src_tarball"
-                rm -rf "hbase-install"
-                rm -rf "hbase-client"
+                rm -rf "hbase-install" && mkdir "hbase-install"
+                rm -rf "hbase-client" && mkdir "hbase-client"
                 rm -rf "hbase-hadoop3-install"
                 rm -rf "hbase-hadoop3-client"
                 # remove old hadoop tarballs in workspace
@@ -139,7 +152,7 @@ pipeline {
               }
             } // steps
           } // scm-checkout
-          stage('download hadoop') {
+          stage('install hadoop') {
             steps {
               dir("downloads-hadoop") {
                 sh '''#!/bin/bash -e
@@ -157,11 +170,18 @@ pipeline {
                     echo "Delete stale hadoop cache ${stale}"
                     rm -rf $stale
                   done
+                  rm -rf "${WORKSPACE}/hadoop-install/*"
+                  artifact=$(ls -1 "${WORKSPACE}"/hadoop-${HADOOP_VERSION}-bin.tar.gz | head -n 1)
+                  tar --strip-components=1 -xzf "${artifact}" -C "${WORKSPACE}/hadoop-install"
+                  if [[ ${HADOOP_VERSION} == 3.* ]]; then
+                    # we need to patch some files otherwise minicluster will fail to start, see MAPREDUCE-7471
+                    ${BASEDIR}/dev-support/integration-test/patch-hadoop3.sh "${WORKSPACE}/hadoop-install"
+                  fi
                 '''
               } // dir
             } // steps
-          } // download hadoop
-          stage('integration test ') {
+          } // install hadoop
+          stage('install hbase') {
             steps {
               unstash 'hbase-install'
               sh'''#!/bin/bash -e
@@ -180,6 +200,71 @@ pipeline {
                 fi
               '''
             } // steps
+          }
+          stage('integration test ') {
+            steps {
+              sh '''#!/bin/bash -e
+                hbase_install_dir="hbase-install"
+                hbase_client_dir="hbase-client"
+                if [[ ${HADOOP_VERSION} == 3.* ]] && [[ -d "hbase-hadoop3-install" ]]; then
+                  echo "run hadoop3 client integration test against hbase hadoop3 binaries"
+                  hbase_install_dir="hbase-hadoop3-install"
+                  hbase_client_dir="hbase-hadoop3-client"
+                fi
+                echo "Attempting to use run an instance on top of Hadoop ${HADOOP_VERSION}."
+                # Create working dir
+                rm -rf "${OUTPUT_DIR}/non-shaded" && mkdir "${OUTPUT_DIR}/non-shaded"
+                docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
+                docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                  -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
+                  -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
+                  --workdir=/hbase hbase-integration-test \
+                  component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
+                  --single-process \
+                  --working-dir ${OUTPUT_DIR}/non-shaded \
+                  --hbase-client-install ${hbase_client_dir} \
+                  ${hbase_install_dir} \
+                  hadoop-install/bin/hadoop \
+                  hadoop-install/share/hadoop/yarn/timelineservice \
+                  hadoop-install/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                  hadoop-install/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                  hadoop-install/bin/mapred \
+                  >${OUTPUT_DIR}/hadoop.log 2>&1
+                if [ $? -ne 0 ]; then
+                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop ${HADOOP_VERSION}. [see log for details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop.log]. (note that this means we didn't check the Hadoop ${HADOOP_VERSION} shaded client)" > ${OUTPUT_DIR}/commentfile
+                  exit 2
+                fi
+                echo "(/) {color:green}+1 client integration test for ${HADOOP_VERSION} {color}" >> ${OUTPUT_DIR}/commentfile
+                if [[ ${HADOOP_VERSION} == 2.* ]] || [[ ${HADOOP_VERSION} == 3.2.* ]]; then
+                  echo "skip running shaded hadoop client test for ${HADOOP_VERSION}"
+                  exit 0
+                fi
+                # Create working dir
+                rm -rf "${OUTPUT_DIR}/shaded" && mkdir "${OUTPUT_DIR}/shaded"
+                echo "Attempting to run an instance on top of Hadoop ${HADOOP_VERSION}, relying on the Hadoop client artifacts for the example client program."
+                docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                  -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
+                  -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
+                  --workdir=/hbase hbase-integration-test \
+                  component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
+                  --single-process \
+                  --hadoop-client-classpath hadoop-install/share/hadoop/client/hadoop-client-api-*.jar:hadoop-install/share/hadoop/client/hadoop-client-runtime-*.jar \
+                  --working-dir ${OUTPUT_DIR}/shade \
+                  --hbase-client-install ${hbase_client_dir} \
+                  ${hbase_install_dir} \
+                  hadoop-install/bin/hadoop \
+                  hadoop-install/share/hadoop/yarn/timelineservice \
+                  hadoop-install/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                  hadoop-install/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                  hadoop-install/bin/mapred \
+                  >${OUTPUT_DIR}/hadoop-shaded.log 2>&1
+                if [ $? -ne 0 ]; then
+                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop ${HADOOP_VERSION} using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop-shaded.log]." >> ${OUTPUT_DIR}/commentfile
+                  exit 2
+                fi
+                echo "(/) {color:green}+1 client integration test for ${HADOOP_VERSION} with shaded hadoop client {color}" >> ${OUTPUT_DIR}/commentfile
+              '''
+            } // steps
           } // integration test
         } // stages
       } // matrix
@@ -217,7 +302,7 @@ pipeline {
         def results = []
        results.add('output-srctarball/commentfile')
        results.add("output-integration-hadoop-${env.HADOOP_VERSION}/commentfile")
-        for (hadoop_version in getHadoopVersions(env.HADOOP2_VERSIONS, env.HADOOP3_VERSIONS)) {
+        for (hadoop_version in getHadoopVersions(env.HADOOP_VERSIONS)) {
          results.add("output-integration-hadoop-${hadoop_version}/commentfile")
         }
         echo env.BRANCH_NAME
@@ -255,9 +340,8 @@ pipeline {
 }
 
 @NonCPS
-List<String> getHadoopVersions(String hadoop2Versions, String hadoop3Versions) {
-  return hadoop2Versions.split(',').collect { it.trim() }.findAll { it } as String[]
-    + hadoop3Versions.split(',').collect { it.trim() }.findAll { it } as String[]
+List<String> getHadoopVersions(String versions) {
+  return versions.split(',').collect { it.trim() }.findAll { it } as String[]
 }
 
 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper

Reply via email to