This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-29930
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit a2f5783aee71df7a9003bd0495aae4f1d97b20a7
Author: Duo Zhang <[email protected]>
AuthorDate: Thu Feb 26 16:48:32 2026 +0800

    HBASE-29930 Separate packaging and integration check in nightly job to new 
jenkins job
---
 .../integration-test/integration-test.Jenkinsfile  | 294 +++++++++++++++++++++
 .../{ => integration-test}/patch-hadoop3.sh        |   0
 .../pseudo-distributed-test.sh}                    |   0
 .../source-artifact.sh}                            |  12 +-
 4 files changed, 301 insertions(+), 5 deletions(-)

diff --git a/dev-support/integration-test/integration-test.Jenkinsfile 
b/dev-support/integration-test/integration-test.Jenkinsfile
new file mode 100644
index 00000000000..647a30a3e69
--- /dev/null
+++ b/dev-support/integration-test/integration-test.Jenkinsfile
@@ -0,0 +1,294 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
// Jenkins job that separates the packaging check and the client integration
// tests out of the main nightly job (HBASE-29930).
pipeline {
  agent {
    node {
      label 'hbase'
    }
  }
  triggers {
    cron('@daily')
  }
  options {
    buildDiscarder(logRotator(numToKeepStr: '20'))
    timeout (time: 16, unit: 'HOURS')
    timestamps()
    skipDefaultCheckout()
    disableConcurrentBuilds()
  }
  environment {
    YETUS_RELEASE = '0.15.0'
    HADOOP2_VERSIONS = "2.10.2"
    HADOOP3_VERSIONS = "3.3.5,3.3.6,3.4.0,3.4.1,3.4.2,3.4.3"
    BASEDIR = "${env.WORKSPACE}/component"
  }
  parameters {
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  stages {
    stage('scm-checkout') {
      steps {
        dir('component') {
          checkout scm
        }
      }
    }
    // This is meant to mimic what a release manager will do to create RCs.
    // See http://hbase.apache.org/book.html#maven.release
    // TODO (HBASE-23870): replace this with invocation of the release tool
    stage ('packaging test') {
      steps {
        sh '''#!/bin/bash -e
          echo "Setting up directories"
          rm -rf "output-srctarball" && mkdir "output-srctarball"
          rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
          rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
          rm -rf ".m2-for-src" && mkdir ".m2-for-src"
        '''
        sh '''#!/bin/bash -e
          rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
          "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
          echo "got the following saved stats in 'output-srctarball/machine'"
          ls -lh "output-srctarball/machine"
        '''
        sh '''#!/bin/bash -e
          echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
          docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
          # BUGFIX: under 'bash -e' a trailing 'if [ $? -eq 0 ]' is dead code --
          # a failing docker run exits the script before the check, so the
          # failure commentfile was never written. Test the command directly.
          if docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
              -u $(id -u):$(id -g) -e JAVA_HOME="/usr/lib/jvm/java-17" --workdir=/hbase hbase-integration-test \
              "component/dev-support/integration-test/source-artifact.sh" \
              --intermediate-file-dir output-srctarball \
              --unpack-temp-dir unpacked_src_tarball \
              --maven-m2-initial .m2-for-repo \
              --maven-m2-src-build .m2-for-src \
              --clean-source-checkout \
              component ; then
            echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
          else
            echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
            exit 1
          fi
        '''
        echo "make sure we have proper hbase tarballs under hbase-assembly"
        sh '''#!/bin/bash -e
          if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v hadoop3 | wc -l) ]; then
            echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
            exit 1
          fi
          if [[ "${BRANCH_NAME}" == *"branch-2"* ]]; then
            # BUGFIX: this was '2 -eq', which failed exactly when the expected
            # two hadoop3 tarballs WERE present. Fail when they are missing.
            if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
               echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected hadoop3 binaries.' >>output-srctarball/commentfile
               exit 1
            fi
          fi
        '''
        stash name: 'hbase-install', includes: "unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz"
      } // steps
    } // packaging test
    stage ('integration test matrix') {
      matrix {
        agent {
          node {
            label 'hbase'
          }
        }
        axes {
          axis {
            name 'HADOOP_VERSION'
            // matrix does not support dynamic axis values, so here we need to keep align with the
            // above environment
            values "2.10.2","3.3.5","3.3.6","3.4.0","3.4.1","3.4.2","3.4.3"
          }
        }
        environment {
          BASEDIR = "${env.WORKSPACE}/component"
          OUTPUT_DIR = "output-integration-hadoop-${env.HADOOP_VERSION}"
        }
        stages {
          stage('scm-checkout') {
            steps {
              sh '''#!/bin/bash -e
                echo "Setting up directories"
                rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
                # Pre-seed a failure comment; a successful run should overwrite it.
                echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >${OUTPUT_DIR}/commentfile
                rm -rf "unpacked_src_tarball"
                rm -rf "hbase-install"
                rm -rf "hbase-client"
                rm -rf "hbase-hadoop3-install"
                rm -rf "hbase-hadoop3-client"
                # remove old hadoop tarballs in workspace
                rm -rf hadoop-*.tar.gz
              '''
              dir('component') {
                checkout scm
              }
            } // steps
          } // scm-checkout
          stage('download hadoop') {
            steps {
              dir("downloads-hadoop") {
                sh '''#!/bin/bash -e
                  echo "Make sure we have a directory for downloading dependencies: $(pwd)"
                '''
                sh '''#!/bin/bash -e
                  echo "Ensure we have a copy of Hadoop ${HADOOP_VERSION}"
                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                    --working-dir "${WORKSPACE}/downloads-hadoop" \
                    --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
                    --verify-tar-gz \
                    "${WORKSPACE}/hadoop-${HADOOP_VERSION}-bin.tar.gz" \
                    "hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz"
                  for stale in $(ls -1 "${WORKSPACE}"/hadoop-*.tar.gz | grep -v ${HADOOP_VERSION}); do
                    echo "Delete stale hadoop cache ${stale}"
                    rm -rf "${stale}"
                  done
                '''
              } // dir
            } // steps
          } // download hadoop
          stage('integration test') {
            steps {
              unstash 'hbase-install'
              sh '''#!/bin/bash -e
                # BUGFIX: 'tar -C <dir>' requires the directory to exist; the
                # scm-checkout stage removed these and nothing recreated them.
                mkdir "hbase-install"
                mkdir "hbase-client"
                install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v client-bin | grep -v hadoop3)
                tar --strip-components=1 -xzf "${install_artifact}" -C "hbase-install"
                client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz | grep -v hadoop3)
                tar --strip-components=1 -xzf "${client_artifact}" -C "hbase-client"
                if [ 2 -eq $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
                  echo "hadoop3 artifacts available, unpacking the hbase hadoop3 bin tarball into 'hbase-hadoop3-install' and the client hadoop3 tarball into 'hbase-hadoop3-client'"
                  mkdir hbase-hadoop3-install
                  mkdir hbase-hadoop3-client
                  hadoop3_install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | grep -v client-bin)
                  tar --strip-components=1 -xzf "${hadoop3_install_artifact}" -C "hbase-hadoop3-install"
                  hadoop3_client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-client-bin.tar.gz)
                  tar --strip-components=1 -xzf "${hadoop3_client_artifact}" -C "hbase-hadoop3-client"
                fi
                # TODO(review): this stage only unpacks the artifacts -- nothing
                # invokes pseudo-distributed-test.sh yet, so the pre-seeded -1
                # commentfile is never overwritten with a success message.
              '''
            } // steps
          } // integration test
        } // stages
      } // matrix
    } // integration test matrix
  } // stages
  post {
    always {
      script {
        def srcFile = "${env.WORKSPACE}/output-srctarball/hbase-src.tar.gz"
        if (fileExists(srcFile)) {
          echo "upload hbase-src.tar.gz to nightlies"
          sshPublisher(publishers: [
            sshPublisherDesc(configName: 'Nightlies',
              transfers: [
                sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                  sourceFiles: srcFile
                )
              ]
            )
          ])
          // remove the big src tarball, store the nightlies url in hbase-src.html
          sh '''#!/bin/bash -e
            SRC_TAR="${WORKSPACE}/output-srctarball/hbase-src.tar.gz"
            echo "Remove ${SRC_TAR} for saving space"
            rm -rf "${SRC_TAR}"
            python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html"
          '''
        }
      }
      archiveArtifacts artifacts: 'output-srctarball/*'
      archiveArtifacts artifacts: 'output-srctarball/**/*'
      // The matrix cells run on their own agents, so the integration output
      // dirs may not exist in this workspace; do not fail post{} over that.
      archiveArtifacts artifacts: 'output-integration-*/*', allowEmptyArchive: true
      archiveArtifacts artifacts: 'output-integration-*/**/*', allowEmptyArchive: true
      script {
        def results = []
        results.add('output-srctarball/commentfile')
        // BUGFIX: dropped results.add("output-integration-hadoop-${env.HADOOP_VERSION}/commentfile").
        // HADOOP_VERSION is a matrix axis variable and is null in pipeline-level
        // post{}, so that entry resolved to '...hadoop-null/commentfile'; the
        // loop below already covers every axis value.
        for (hadoop_version in getHadoopVersions(env.HADOOP2_VERSIONS, env.HADOOP3_VERSIONS)) {
          results.add("output-integration-hadoop-${hadoop_version}/commentfile")
        }
        echo env.BRANCH_NAME
        echo env.BUILD_URL
        echo currentBuild.result
        echo currentBuild.durationString
        def comment = "Results for branch ${env.BRANCH_NAME}\n"
        comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
        if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
          comment += "(/) *{color:green}+1 overall{color}*\n"
        } else {
          comment += "(x) *{color:red}-1 overall{color}*\n"
          // Ideally get the committer out of the change and @ mention them in the per-jira comment
        }
        comment += "----\ndetails (if available):\n\n"
        echo ""
        echo "[DEBUG] trying to aggregate step-wise results"
        comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
        echo "[INFO] Comment:"
        echo comment
        echo ""
        echo "[DEBUG] checking to see if feature branch"
        def jiras = getJirasToComment(env.BRANCH_NAME, [])
        if (jiras.isEmpty()) {
          echo "[DEBUG] non-feature branch, checking change messages for jira keys."
          echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
          jiras = getJirasToCommentFromChangesets(currentBuild)
        }
        jiras.each { currentIssue ->
          jiraComment issueKey: currentIssue, body: comment
        }
      } // script
    } // always
  } // post
}
+
@NonCPS
// Combine the comma-separated hadoop2 and hadoop3 version strings into one
// list, trimming whitespace and dropping empty entries.
//
// BUGFIX: the previous implementation was
//   return hadoop2Versions... as String[]
//     + hadoop3Versions... as String[]
// In Groovy the 'return' statement is complete at the end of the first line,
// so the '+ hadoop3Versions...' line parsed as a separate, dead statement and
// the hadoop3 versions were silently dropped. It also cast to String[]
// despite the declared List<String> return type.
List<String> getHadoopVersions(String hadoop2Versions, String hadoop3Versions) {
  List<String> versions = []
  versions.addAll(hadoop2Versions.split(',').collect { it.trim() }.findAll { it })
  versions.addAll(hadoop3Versions.split(',').collect { it.trim() }.findAll { it })
  return versions
}
+
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
@NonCPS
// Walk every change set of the given build, log each change entry, and
// collect the distinct HBASE-NNNN jira keys found in the commit messages.
// NOTE(review): this calls the 'echo' pipeline step from @NonCPS code, which
// Jenkins only tolerates in limited cases -- confirm it logs as intended.
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  List<String> found = []
  for (changeSet in thisBuild.changeSets) {
    for (entry in changeSet.getItems()) {
      CharSequence message = entry.msg
      echo "change: ${entry}"
      echo "     ${message}"
      echo "     ${entry.commitId}"
      echo "     ${entry.author}"
      echo ""
      found = getJirasToComment(message, found)
    }
  }
  return found
}
+
@NonCPS
// Scan 'source' for HBASE-NNNN jira keys and append each key not already in
// 'seen'. Returns the (mutated) accumulator so calls can be chained.
List<String> getJirasToComment(CharSequence source, List<String> seen) {
  source.eachMatch("HBASE-[0-9]+") { key ->
    echo "[DEBUG] found jira key: ${key}"
    if (seen.contains(key)) {
      echo "[DEBUG] already commented on ${key}."
    } else {
      echo "[INFO] commenting on ${key}."
      seen << key
    }
  }
  return seen
}
+
diff --git a/dev-support/patch-hadoop3.sh 
b/dev-support/integration-test/patch-hadoop3.sh
similarity index 100%
rename from dev-support/patch-hadoop3.sh
rename to dev-support/integration-test/patch-hadoop3.sh
diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh 
b/dev-support/integration-test/pseudo-distributed-test.sh
similarity index 100%
rename from dev-support/hbase_nightly_pseudo-distributed-test.sh
rename to dev-support/integration-test/pseudo-distributed-test.sh
diff --git a/dev-support/hbase_nightly_source-artifact.sh 
b/dev-support/integration-test/source-artifact.sh
similarity index 98%
rename from dev-support/hbase_nightly_source-artifact.sh
rename to dev-support/integration-test/source-artifact.sh
index 59667408cfa..e0136b8e9b4 100755
--- a/dev-support/hbase_nightly_source-artifact.sh
+++ b/dev-support/integration-test/source-artifact.sh
@@ -34,6 +34,8 @@ function usage {
   exit 1
 }
 
+set -e
+
 MVN="mvn"
 if ! command -v mvn &>/dev/null; then
   MVN=$MAVEN_HOME/bin/mvn
@@ -217,8 +219,11 @@ function build_tarball {
 
 cd "${unpack_dir}"
 
-${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:active-profiles | grep -q 
hadoop-3.0
-if [ $? -ne 0 ]; then
+
+if ${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:active-profiles | grep -q 
hadoop-3.0; then
+  echo "The hadoop-3.0 profile is activated by default, build a default 
tarball."
+  build_tarball 0
+else
   echo "The hadoop-3.0 profile is not activated by default, build a default 
tarball first."
   # use java 8 to build with hadoop2
   JAVA_HOME="/usr/lib/jvm/java-8" build_tarball 0
@@ -235,7 +240,4 @@ if [ $? -ne 0 ]; then
   fi
   # move tarballs back
   mv "${unpack_dir}"/hbase-*-bin.tar.gz "${unpack_dir}"/hbase-assembly/target/
-else
-  echo "The hadoop-3.0 profile is activated by default, build a default 
tarball."
-  build_tarball 0
 fi

Reply via email to