This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch HBASE-29930
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/HBASE-29930 by this push:
new f3189a6a6df add more steps
f3189a6a6df is described below
commit f3189a6a6df4452e029887ef85d895eb6f42d297
Author: Duo Zhang <[email protected]>
AuthorDate: Sat Feb 28 19:24:44 2026 +0800
add more steps
---
.../integration-test/integration-test.Jenkinsfile | 89 ++++++++++++++++++++--
1 file changed, 83 insertions(+), 6 deletions(-)
diff --git a/dev-support/integration-test/integration-test.Jenkinsfile
b/dev-support/integration-test/integration-test.Jenkinsfile
index 302e40640be..521d0ad011d 100644
--- a/dev-support/integration-test/integration-test.Jenkinsfile
+++ b/dev-support/integration-test/integration-test.Jenkinsfile
@@ -40,7 +40,7 @@ pipeline {
booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a
lot more meta-information.')
}
stages {
- stage ('scm-checkout') {
+ stage('scm-checkout') {
steps {
dir('component') {
checkout scm
@@ -59,8 +59,7 @@ pipeline {
rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
rm -rf ".m2-for-src" && mkdir ".m2-for-src"
'''
- sh '''#!/usr/bin/env bash
- set -e
+ sh '''#!/bin/bash -e
rm -rf "output-srctarball/machine" && mkdir
"output-srctarball/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh"
"output-srctarball/machine"
echo "got the following saved stats in 'output-srctarball/machine'"
@@ -87,7 +86,6 @@ pipeline {
'''
echo "make sure we have proper hbase tarballs under hbase-assembly"
sh '''#!/bin/bash -e
- ls
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz
if [ 2 -ne $(ls -1
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz |
grep -v hadoop3 | wc -l) ]; then
echo '(x) {color:red}-1 testing binary artifact{color}\n-- source
tarball did not produce the expected binaries.' >>output-srctarball/commentfile
exit 1
@@ -99,9 +97,88 @@ pipeline {
fi
fi
'''
- stash name: 'hbase-install', includes:
"${env.WORKSPACE}/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz"
+ stash name: 'hbase-install', includes:
"unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz"
} // steps
- } // packaing test
+ } // packaging test
// Matrix stage: run the client integration test once per Hadoop version.
// HADOOP_VERSION axis values come from getHadoopVersions(), declared elsewhere
// in this Jenkinsfile (presumably a shared-library/global helper — confirm).
stage('integration test matrix') {
  matrix {
    agent {
      label 'hbase'
    }
    axes {
      axis {
        name 'HADOOP_VERSION'
        values getHadoopVersions(env.HADOOP2_VERSIONS, env.HADOOP3_VERSIONS)
      }
    }
    // FIX: environment {} is not a legal direct child of stages {} in
    // declarative pipeline; it must be a child of matrix (or of a stage).
    environment {
      BASEDIR = "${env.WORKSPACE}/component"
      OUTPUT_DIR = "output-integration-hadoop-${env.HADOOP_VERSION}"
    }
    stages {
      // FIX: renamed from 'scm-checkout' — declarative pipeline requires
      // stage names to be unique, and 'scm-checkout' already exists at the
      // top level of this pipeline.
      stage('matrix scm-checkout') {
        steps {
          sh '''#!/bin/bash -e
            echo "Setting up directories"
            rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
            echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >${OUTPUT_DIR}/commentfile
            rm -rf "unpacked_src_tarball"
            rm -rf "hbase-install"
            rm -rf "hbase-client"
            rm -rf "hbase-hadoop3-install"
            rm -rf "hbase-hadoop3-client"
            # remove old hadoop tarballs in workspace
            rm -rf hadoop-*.tar.gz
          '''
          dir('component') {
            checkout scm
          }
        }
      }
      stage('download hadoop') {
        // FIX: dir/sh must be wrapped in a steps {} block inside a
        // declarative stage; the original had them directly in the stage body.
        steps {
          dir("downloads-hadoop") {
            sh '''#!/bin/bash -e
              echo "Make sure we have a directory for downloading dependencies: $(pwd)"
            '''
            sh '''#!/bin/bash -e
              echo "Ensure we have a copy of Hadoop ${HADOOP_VERSION}"
              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                --working-dir "${WORKSPACE}/downloads-hadoop" \
                --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
                --verify-tar-gz \
                "${WORKSPACE}/hadoop-${HADOOP_VERSION}-bin.tar.gz" \
                "hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz"
              # Evict cached tarballs for other hadoop versions so the
              # workspace only keeps the one this matrix cell needs.
              for stale in $(ls -1 "${WORKSPACE}"/hadoop-*.tar.gz | grep -v ${HADOOP_VERSION}); do
                echo "Delete stale hadoop cache ${stale}"
                rm -rf $stale
              done
            '''
          } // dir
        } // steps
      } // download hadoop
      stage('integration test') {
        steps {
          unstash 'hbase-install'
          sh '''#!/bin/bash -e
            # FIX: hbase-install/hbase-client were removed in the checkout
            # stage but never recreated; tar -C fails on a missing directory.
            mkdir -p hbase-install hbase-client
            install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v client-bin | grep -v hadoop3)
            tar --strip-components=1 -xzf "${install_artifact}" -C "hbase-install"
            client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz | grep -v hadoop3)
            tar --strip-components=1 -xzf "${client_artifact}" -C "hbase-client"
            if [ 2 -eq $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
              echo "hadoop3 artifacts available, unpacking the hbase hadoop3 bin tarball into 'hbase-hadoop3-install' and the client hadoop3 tarball into 'hbase-hadoop3-client'"
              mkdir hbase-hadoop3-install
              mkdir hbase-hadoop3-client
              hadoop3_install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | grep -v client-bin)
              tar --strip-components=1 -xzf "${hadoop3_install_artifact}" -C "hbase-hadoop3-install"
              hadoop3_client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-client-bin.tar.gz)
              tar --strip-components=1 -xzf "${hadoop3_client_artifact}" -C "hbase-hadoop3-client"
            fi
          '''
        } // steps
      } // integration test
    } // stages
  } // matrix
} // integration test matrix
} // stages
post {
always {