This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-29930
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/HBASE-29930 by this push:
     new c2ea371bc12 fix
c2ea371bc12 is described below

commit c2ea371bc129df8ea01405b3ba1b534c6799a761
Author: Duo Zhang <[email protected]>
AuthorDate: Sat Feb 28 00:31:25 2026 +0800

    fix
---
 dev-support/integration-test.Jenkinsfile | 444 +++++++++++++++----------------
 1 file changed, 222 insertions(+), 222 deletions(-)

diff --git a/dev-support/integration-test.Jenkinsfile 
b/dev-support/integration-test.Jenkinsfile
index 762abcb51af..922f56e10e1 100644
--- a/dev-support/integration-test.Jenkinsfile
+++ b/dev-support/integration-test.Jenkinsfile
@@ -5,7 +5,7 @@ pipeline {
   }
   options {
     buildDiscarder(logRotator(numToKeepStr: '20'))
-    timeout (time: 16, unit: 'HOURS')
+    timeout(time: 16, unit: 'HOURS')
     timestamps()
     skipDefaultCheckout()
     disableConcurrentBuilds()
@@ -19,13 +19,13 @@ pipeline {
     booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
   }
   stages {
-  stage ('thirdparty installs') {
-    agent {
-      node {
-        label 'hbase'
+    stage('thirdparty installs') {
+      agent {
+        node {
+          label 'hbase'
+        }
       }
-    }
-    steps {
+      steps {
         dir('component') {
           checkout scm
         }
@@ -72,7 +72,7 @@ pipeline {
                 for (hadoop3_version in hadoop3_versions) {
                   env.HADOOP3_VERSION = hadoop3_version;
                   echo "env.HADOOP3_VERSION" + env.hadoop3_version;
-                  stage ('Hadoop 3 cache inner stage') {
+                  stage('Hadoop 3 cache inner stage') {
                     dir("downloads-hadoop-${HADOOP3_VERSION}") {
                       sh '''#!/usr/bin/env bash
                         echo "Make sure we have a directory for downloading 
dependencies: $(pwd)"
@@ -99,242 +99,239 @@ pipeline {
             }
           }
         }
-    }
-  } // stage ('thirdparty installs')
-  // This is meant to mimic what a release manager will do to create RCs.
-  // See http://hbase.apache.org/book.html#maven.release
-  // TODO (HBASE-23870): replace this with invocation of the release tool
-  stage ('packaging test') {
-    agent {
-      node {
-        label 'hbase'
       }
     }
-    environment {
-      BASEDIR = "${env.WORKSPACE}/component"
-    }
-    steps {
-      dir('component') {
-        checkout scm
-      }
-      sh '''#!/bin/bash -e
-        echo "Setting up directories"
-        rm -rf "output-srctarball" && mkdir "output-srctarball"
-        rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
-        rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
-        rm -rf ".m2-for-src" && mkdir ".m2-for-src"
-      '''
-      sh '''#!/usr/bin/env bash
-        set -e
-        rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
-        "${BASEDIR}/dev-support/gather_machine_environment.sh" 
"output-srctarball/machine"
-        echo "got the following saved stats in 'output-srctarball/machine'"
-        ls -lh "output-srctarball/machine"
-      '''
-      sh '''#!/bin/bash -e
-        echo "Checking the steps for an RM to make a source artifact, then a 
binary artifact."
-        docker build -t hbase-integration-test -f 
"${BASEDIR}/dev-support/docker/Dockerfile" .
-        docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro 
-v /etc/group:/etc/group:ro \
-          -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" 
--workdir=/hbase hbase-integration-test \
-          "component/dev-support/hbase_nightly_source-artifact.sh" \
-          --intermediate-file-dir output-srctarball \
-          --unpack-temp-dir unpacked_src_tarball \
-          --maven-m2-initial .m2-for-repo \
-          --maven-m2-src-build .m2-for-src \
-          --clean-source-checkout \
-          component
-        if [ $? -eq 0 ]; then
-          echo '(/) {color:green}+1 source release artifact{color}\n-- See 
build output for details.' >output-srctarball/commentfile
-        else
-          echo '(x) {color:red}-1 source release artifact{color}\n-- See build 
output for details.' >output-srctarball/commentfile
-          exit 1
-        fi
-      '''
-      echo "unpacking the hbase bin tarball into 'hbase-install' and the 
client tarball into 'hbase-client'"
-      sh '''#!/bin/bash -e
-        if [ 2 -ne $(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | 
grep -v hadoop3 | wc -l) ]; then
-          echo '(x) {color:red}-1 testing binary artifact{color}\n-- source 
tarball did not produce the expected binaries.' >>output-srctarball/commentfile
-          exit 1
-        fi
-        if [[ "${BRANCH_NAME}" == *"branch-2"* ]]; then
-          if [ 2 -eq $(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz
 | wc -l) ]; then
-             echo '(x) {color:red}-1 testing binary artifact{color}\n-- source 
tarball did not produce the expected hadoop3 binaries.' 
>>output-srctarball/commentfile
-          fi
-        fi
-      '''
-      stash name: 'hbase-install', includes: 
"${env.WORKSPACE}/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz"
-    }
-  }
-  stage ('integration test') {
-    agent none
-    environment {
-      BASEDIR = "${env.WORKSPACE}/component"
-      BRANCH = "${env.BRANCH_NAME}"
-    }
-    steps {
-    parallel {
-      stage('hadoop 2 integration test') {
-        agent {
-          node {
-            label 'hbase'
-          }
+
+    stage('packaging test') {
+      agent {
+        node {
+          label 'hbase'
         }
-        environment {
-          OUTPUT_DIR = "output-integration-hadoop-${env.HADOOP2_VERSION}"
+      }
+      environment {
+        BASEDIR = "${env.WORKSPACE}/component"
+      }
+      steps {
+        dir('component') {
+          checkout scm
         }
-        steps {
-          sh '''#!/bin/bash -e
-            echo "Setting up directories"
-            rm -rf "${OUTPUT_DIR}"
-            echo "(x) {color:red}-1 client integration test{color}\n-- 
Something went wrong with this stage, [check relevant console 
output|${BUILD_URL}/console]." >${OUTPUT_DIR}/commentfile
-            rm -rf "hbase-install"
-            rm -rf "hbase-client"
-            rm -rf "hbase-hadoop3-install"
-            rm -rf "hbase-hadoop3-client"
-            # remove old hadoop tarballs in workspace
-            rm -rf hadoop-2*.tar.gz
-          '''
-          unstash 'hadoop-2'
-          unstash 'hbase-install'
-          sh '''#!/bin/bash -xe
-            if [[ "${BRANCH_NAME}" == *"branch-2"* ]]; then
-              echo "Attempting to run an instance on top of Hadoop 2."
-              hadoop_artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head 
-n 1)
-              tar --strip-components=1 -xzf "${hadoop_artifact}" -C "hadoop-2"
-              install_artifact=$(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | 
grep -v client-bin | grep -v hadoop3)
-              tar --strip-component=1 -xzf "${install_artifact}" -C 
"hbase-install"
-              client_artifact=$(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz
 | grep -v hadoop3)
-              tar --strip-component=1 -xzf "${client_artifact}" -C 
"hbase-client"
-              docker build -t hbase-integration-test -f 
"${BASEDIR}/dev-support/docker/Dockerfile" .
-              docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-                -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-8" 
--workdir=/hbase hbase-integration-test \
-                component/dev-support/hbase_nightly_pseudo-distributed-test.sh 
\
-                --single-process \
-                --working-dir ${OUTPUT_DIR}/hadoop-2 \
-                --hbase-client-install "hbase-client" \
-                hbase-install \
-                hadoop-2/bin/hadoop \
-                hadoop-2/share/hadoop/yarn/timelineservice \
-                
hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                
hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                hadoop-2/bin/mapred \
-                >${OUTPUT_DIR}/hadoop-2.log 2>&1
-              if [ $? -ne 0 ]; then
-                echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 2. [see log 
for details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop-2.log]. (note that this 
means we didn't run on Hadoop 3)" >${OUTPUT_DIR}/commentfile
-                exit 2
-              fi
-              echo "(/) {color:green}+1 client integration test for HBase 2 
{color}" >${OUTPUT_DIR}/commentfile
-            else
-              echo "Skipping to run against Hadoop 2 for branch ${BRANCH_NAME}"
+        sh '''#!/bin/bash -e
+          echo "Setting up directories"
+          rm -rf "output-srctarball" && mkdir "output-srctarball"
+          rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
+          rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
+          rm -rf ".m2-for-src" && mkdir ".m2-for-src"
+        '''
+        sh '''#!/usr/bin/env bash
+          set -e
+          rm -rf "output-srctarball/machine" && mkdir 
"output-srctarball/machine"
+          "${BASEDIR}/dev-support/gather_machine_environment.sh" 
"output-srctarball/machine"
+          echo "got the following saved stats in 'output-srctarball/machine'"
+          ls -lh "output-srctarball/machine"
+        '''
+        sh '''#!/bin/bash -e
+          echo "Checking the steps for an RM to make a source artifact, then a 
binary artifact."
+          docker build -t hbase-integration-test -f 
"${BASEDIR}/dev-support/docker/Dockerfile" .
+          docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+            -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" 
--workdir=/hbase hbase-integration-test \
+            "component/dev-support/hbase_nightly_source-artifact.sh" \
+            --intermediate-file-dir output-srctarball \
+            --unpack-temp-dir unpacked_src_tarball \
+            --maven-m2-initial .m2-for-repo \
+            --maven-m2-src-build .m2-for-src \
+            --clean-source-checkout \
+            component
+          if [ $? -eq 0 ]; then
+            echo '(/) {color:green}+1 source release artifact{color}\n-- See 
build output for details.' >output-srctarball/commentfile
+          else
+            echo '(x) {color:red}-1 source release artifact{color}\n-- See 
build output for details.' >output-srctarball/commentfile
+            exit 1
+          fi
+        '''
+        echo "unpacking the hbase bin tarball into 'hbase-install' and the 
client tarball into 'hbase-client'"
+        sh '''#!/bin/bash -e
+          if [ 2 -ne $(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | 
grep -v hadoop3 | wc -l) ]; then
+            echo '(x) {color:red}-1 testing binary artifact{color}\n-- source 
tarball did not produce the expected binaries.' >>output-srctarball/commentfile
+            exit 1
+          fi
+          if [[ "${BRANCH_NAME}" == *"branch-2"* ]]; then
+            if [ 2 -eq $(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz
 | wc -l) ]; then
+              echo '(x) {color:red}-1 testing binary artifact{color}\n-- 
source tarball did not produce the expected hadoop3 binaries.' 
>>output-srctarball/commentfile
             fi
-          '''
-        }
+          fi
+        '''
+        stash name: 'hbase-install', includes: 
"${env.WORKSPACE}/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz"
       }
-      stage('hadoop 3 integration tests') {
-        matrix {
-          axes {
-            axis {
-              name 'HADOOP_VERSION'
-              values getHadoopVersions(env.HADOOP3_VERSIONS)
+    }
+
+    stage('integration test') {
+      agent none
+      environment {
+        BASEDIR = "${env.WORKSPACE}/component"
+        BRANCH = "${env.BRANCH_NAME}"
+      }
+      steps {
+        parallel {
+          stage('hadoop 2 integration test') {
+            agent {
+              node {
+                label 'hbase'
+              }
             }
-          }
-          agent {
-            node {
-              label 'hbase'
+            environment {
+              OUTPUT_DIR = "output-integration-hadoop-${env.HADOOP2_VERSION}"
             }
-          }
-          stages {
-            stage('hadoop 3 integration test') {
-              environment {
-                OUTPUT_DIR = "output-integration-hadoop-${env.HADOOP_VERSION}"
-              }
-              steps {
-                dir('component') {
-                  checkout scm
-                }
-                sh '''#!/bin/bash -e
-                  echo "Setting up directories"
-                  rm -rf "${OUTPUT_DIR}"
-                  echo "(x) {color:red}-1 client integration test{color}\n-- 
Something went wrong with this stage, [check relevant console 
output|${BUILD_URL}/console]." >${OUTPUT_DIR}/commentfile
-                  rm -rf "hbase-install"
-                  rm -rf "hbase-client"
-                  rm -rf "hbase-hadoop3-install"
-                  rm -rf "hbase-hadoop3-client"
-                  # remove old hadoop tarballs in workspace
-                  rm -rf hadoop-3*.tar.gz
-                '''
-                unstash "hadoop-${HADOOP_VERSION}"
-                unstash 'hbase-install'
-                sh '''#!/bin/bash -e
-                  echo "Attempting to use run an instance on top of Hadoop 
${HADOOP_VERSION}."
-                  # Clean up any previous tested Hadoop3 files before 
unpacking the current one
-                  rm -rf hadoop-3/*
-                  # Create working dir
-                  rm -rf "${OUTPUT_DIR}/non-shaded" && mkdir 
"${OUTPUT_DIR}/non-shaded"
-                  rm -rf "${OUTPUT_DIR}/shaded" && mkdir "${OUTPUT_DIR}/shaded"
-                  artifact=$(ls -1 
"${WORKSPACE}"/hadoop-${HADOOP_VERSION}-bin.tar.gz | head -n 1)
-                  tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
-                  # we need to patch some files otherwise minicluster will 
fail to start, see MAPREDUCE-7471
-                  ${BASEDIR}/dev-support/patch-hadoop3.sh hadoop-3
-                  hbase_install_dir="hbase-install"
-                  hbase_client_dir="hbase-client"
-                  if [ -d "hbase-hadoop3-install" ]; then
-                    echo "run hadoop3 client integration test against hbase 
hadoop3 binaries"
-                    hbase_install_dir="hbase-hadoop3-install"
-                    hbase_client_dir="hbase-hadoop3-client"
-                  fi
+            steps {
+              sh '''#!/bin/bash -e
+                echo "Setting up directories"
+                rm -rf "${OUTPUT_DIR}"
+                echo "(x) {color:red}-1 client integration test{color}\n-- 
Something went wrong with this stage, [check relevant console 
output|${BUILD_URL}/console]." >${OUTPUT_DIR}/commentfile
+                rm -rf "hbase-install"
+                rm -rf "hbase-client"
+                rm -rf "hbase-hadoop3-install"
+                rm -rf "hbase-hadoop3-client"
+                rm -rf hadoop-2*.tar.gz
+              '''
+              unstash 'hadoop-2'
+              unstash 'hbase-install'
+              sh '''#!/bin/bash -xe
+                if [[ "${BRANCH_NAME}" == *"branch-2"* ]]; then
+                  echo "Attempting to run an instance on top of Hadoop 2."
+                  hadoop_artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | 
head -n 1)
+                  tar --strip-components=1 -xzf "${hadoop_artifact}" -C 
"hadoop-2"
+                  install_artifact=$(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | 
grep -v client-bin | grep -v hadoop3)
+                  tar --strip-component=1 -xzf "${install_artifact}" -C 
"hbase-install"
+                  client_artifact=$(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz
 | grep -v hadoop3)
+                  tar --strip-component=1 -xzf "${client_artifact}" -C 
"hbase-client"
                   docker build -t hbase-integration-test -f 
"${BASEDIR}/dev-support/docker/Dockerfile" .
                   docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-                    -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
-                    -e HADOOP_OPTS="--add-opens 
java.base/java.lang=ALL-UNNAMED" \
-                    --workdir=/hbase hbase-integration-test \
+                    -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-8" 
--workdir=/hbase hbase-integration-test \
                     
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
                     --single-process \
-                    --working-dir ${OUTPUT_DIR}/non-shaded \
-                    --hbase-client-install ${hbase_client_dir} \
-                    ${hbase_install_dir} \
-                    hadoop-3/bin/hadoop \
-                    hadoop-3/share/hadoop/yarn/timelineservice \
-                    
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                    
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                    hadoop-3/bin/mapred \
-                    >${OUTPUT_DIR}/hadoop.log 2>&1
+                    --working-dir ${OUTPUT_DIR}/hadoop-2 \
+                    --hbase-client-install "hbase-client" \
+                    hbase-install \
+                    hadoop-2/bin/hadoop \
+                    hadoop-2/share/hadoop/yarn/timelineservice \
+                    
hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                    
hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                    hadoop-2/bin/mapred \
+                    >${OUTPUT_DIR}/hadoop-2.log 2>&1
                   if [ $? -ne 0 ]; then
-                    echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 
${HADOOP_VERSION}. [see log for 
details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop.log]. (note that this means 
we didn't check the Hadoop ${HADOOP_VERSION} shaded client)" > 
${OUTPUT_DIR}/commentfile
+                    echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 2. [see log 
for details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop-2.log]. (note that this 
means we didn't run on Hadoop 3)" >${OUTPUT_DIR}/commentfile
                     exit 2
                   fi
-                  echo "(/) {color:green}+1 client integration test for 
${HADOOP_VERSION} {color}" >> ${OUTPUT_DIR}/commentfile
-                  echo "Attempting to run an instance on top of Hadoop 
${HADOOP_VERSION}, relying on the Hadoop client artifacts for the example 
client program."
-                  docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-                    -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
-                    -e HADOOP_OPTS="--add-opens 
java.base/java.lang=ALL-UNNAMED" \
-                    --workdir=/hbase hbase-integration-test \
-                    
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
-                    --single-process \
-                    --hadoop-client-classpath 
hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar
 \
-                    --working-dir ${OUTPUT_DIR}/shade \
-                    --hbase-client-install ${hbase_client_dir} \
-                    ${hbase_install_dir} \
-                    hadoop-3/bin/hadoop \
-                    hadoop-3/share/hadoop/yarn/timelineservice \
-                    
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                    
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                    hadoop-3/bin/mapred \
-                    >${OUTPUT_DIR}/hadoop-shaded.log 2>&1
-                  if [ $? -ne 0 ]; then
-                    echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 
${HADOOP_VERSION} using Hadoop's shaded client. [see log for 
details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop-shaded.log]." >> 
${OUTPUT_DIR}/commentfile
-                    exit 2
-                  fi
-                  echo "(/) {color:green}+1 client integration test for 
${HADOOP_VERSION} with shaded hadoop client {color}" >> 
${OUTPUT_DIR}/commentfile
-                '''
+                  echo "(/) {color:green}+1 client integration test for HBase 
2 {color}" >${OUTPUT_DIR}/commentfile
+                else
+                  echo "Skipping to run against Hadoop 2 for branch 
${BRANCH_NAME}"
+                fi
+              '''
+            }
+          }
+
+          stage('hadoop 3 integration tests') {
+            matrix {
+              axes {
+                axis {
+                  name 'HADOOP_VERSION'
+                  values getHadoopVersions(env.HADOOP3_VERSIONS)
+                }
+              }
+              agent {
+                node {
+                  label 'hbase'
+                }
+              }
+              stages {
+                stage('hadoop 3 integration test') {
+                  environment {
+                    OUTPUT_DIR = 
"output-integration-hadoop-${env.HADOOP_VERSION}"
+                  }
+                  steps {
+                    dir('component') {
+                      checkout scm
+                    }
+                    sh '''#!/bin/bash -e
+                      echo "Setting up directories"
+                      rm -rf "${OUTPUT_DIR}"
+                      echo "(x) {color:red}-1 client integration 
test{color}\n-- Something went wrong with this stage, [check relevant console 
output|${BUILD_URL}/console]." >${OUTPUT_DIR}/commentfile
+                      rm -rf "hbase-install"
+                      rm -rf "hbase-client"
+                      rm -rf "hbase-hadoop3-install"
+                      rm -rf "hbase-hadoop3-client"
+                      rm -rf hadoop-3*.tar.gz
+                    '''
+                    unstash "hadoop-${HADOOP_VERSION}"
+                    unstash 'hbase-install'
+                    sh '''#!/bin/bash -e
+                      echo "Attempting to use run an instance on top of Hadoop 
${HADOOP_VERSION}."
+                      rm -rf hadoop-3/*
+                      rm -rf "${OUTPUT_DIR}/non-shaded" && mkdir 
"${OUTPUT_DIR}/non-shaded"
+                      rm -rf "${OUTPUT_DIR}/shaded" && mkdir 
"${OUTPUT_DIR}/shaded"
+                      artifact=$(ls -1 
"${WORKSPACE}"/hadoop-${HADOOP_VERSION}-bin.tar.gz | head -n 1)
+                      tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
+                      ${BASEDIR}/dev-support/patch-hadoop3.sh hadoop-3
+                      hbase_install_dir="hbase-install"
+                      hbase_client_dir="hbase-client"
+                      if [ -d "hbase-hadoop3-install" ]; then
+                        echo "run hadoop3 client integration test against 
hbase hadoop3 binaries"
+                        hbase_install_dir="hbase-hadoop3-install"
+                        hbase_client_dir="hbase-hadoop3-client"
+                      fi
+                      docker build -t hbase-integration-test -f 
"${BASEDIR}/dev-support/docker/Dockerfile" .
+                      docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                        -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" 
\
+                        -e HADOOP_OPTS="--add-opens 
java.base/java.lang=ALL-UNNAMED" \
+                        --workdir=/hbase hbase-integration-test \
+                        
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
+                        --single-process \
+                        --working-dir ${OUTPUT_DIR}/non-shaded \
+                        --hbase-client-install ${hbase_client_dir} \
+                        ${hbase_install_dir} \
+                        hadoop-3/bin/hadoop \
+                        hadoop-3/share/hadoop/yarn/timelineservice \
+                        
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                        
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                        hadoop-3/bin/mapred \
+                        >${OUTPUT_DIR}/hadoop.log 2>&1
+                      if [ $? -ne 0 ]; then
+                        echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 
${HADOOP_VERSION}. [see log for 
details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop.log]. (note that this means 
we didn't check the Hadoop ${HADOOP_VERSION} shaded client)" > 
${OUTPUT_DIR}/commentfile
+                        exit 2
+                      fi
+                      echo "(/) {color:green}+1 client integration test for 
${HADOOP_VERSION} {color}" >> ${OUTPUT_DIR}/commentfile
+                      echo "Attempting to run an instance on top of Hadoop 
${HADOOP_VERSION}, relying on the Hadoop client artifacts for the example 
client program."
+                      docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                        -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" 
\
+                        -e HADOOP_OPTS="--add-opens 
java.base/java.lang=ALL-UNNAMED" \
+                        --workdir=/hbase hbase-integration-test \
+                        
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
+                        --single-process \
+                        --hadoop-client-classpath 
hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar
 \
+                        --working-dir ${OUTPUT_DIR}/shade \
+                        --hbase-client-install ${hbase_client_dir} \
+                        ${hbase_install_dir} \
+                        hadoop-3/bin/hadoop \
+                        hadoop-3/share/hadoop/yarn/timelineservice \
+                        
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                        
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                        hadoop-3/bin/mapred \
+                        >${OUTPUT_DIR}/hadoop-shaded.log 2>&1
+                      if [ $? -ne 0 ]; then
+                        echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 
${HADOOP_VERSION} using Hadoop's shaded client. [see log for 
details|${BUILD_URL}/artifact/${OUTPUT_DIR}/hadoop-shaded.log]." >> 
${OUTPUT_DIR}/commentfile
+                        exit 2
+                      fi
+                      echo "(/) {color:green}+1 client integration test for 
${HADOOP_VERSION} with shaded hadoop client {color}" >> 
${OUTPUT_DIR}/commentfile
+                    '''
+                  }
+                }
               }
             }
           }
         }
       }
     }
-  } // stage integration test
   }
+
   post {
     always {
       sshPublisher(publishers: [
@@ -401,6 +398,7 @@ pipeline {
 }
 
 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
+
 @NonCPS
 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
   def seenJiras = []
@@ -417,6 +415,7 @@ List<String> getJirasToCommentFromChangesets(RunWrapper 
thisBuild) {
   }
   return seenJiras
 }
+
 @NonCPS
 List<String> getJirasToComment(CharSequence source, List<String> seen) {
   source.eachMatch("HBASE-[0-9]+") { currentIssue ->
@@ -430,6 +429,7 @@ List<String> getJirasToComment(CharSequence source, 
List<String> seen) {
   }
   return seen
 }
+
 @NonCPS
 def getHadoopVersions(versionString) {
   return (versionString ?: "")

Reply via email to the mailing list.