This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch branch-2.5
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2.5 by this push:
new 80dd4fb2e91 HBASE-28678 Make nightly builds for 3.x java 17 only and add java 17 test for 2.x (#6032)
80dd4fb2e91 is described below
commit 80dd4fb2e9193bccd7f50cace72669caf2d45e44
Author: Duo Zhang <[email protected]>
AuthorDate: Thu Jun 27 21:05:53 2024 +0800
HBASE-28678 Make nightly builds for 3.x java 17 only and add java 17 test for 2.x (#6032)
Signed-off-by: Xin Sun <[email protected]>
(cherry picked from commit 8ff8748a38d6b7d4725a41b5589d7c38bd36dae9)
---
dev-support/Jenkinsfile | 135 ++++++++++++++++++++++++++++++++++++++-
dev-support/hbase-personality.sh | 29 ++++-----
2 files changed, 143 insertions(+), 21 deletions(-)
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 467f10ad747..3bdbd0eb436 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -203,8 +203,8 @@ pipeline {
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.SHALLOW_CHECKS}"
- SET_JAVA_HOME = "/usr/lib/jvm/java-11"
- JAVA8_HOME="/usr/lib/jvm/java-8"
+ SET_JAVA_HOME = getJavaHomeForYetusGeneralCheck(env.BRANCH_NAME)
+ JAVA8_HOME = "/usr/lib/jvm/java-8"
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
@@ -305,7 +305,7 @@ pipeline {
}
}
when {
- branch 'branch-2*'
+ branch '*branch-2*'
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
@@ -415,6 +415,9 @@ pipeline {
label 'hbase'
}
}
+ when {
+ branch '*branch-2*'
+ }
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
@@ -525,6 +528,9 @@ pipeline {
label 'hbase'
}
}
+ when {
+ branch '*branch-2*'
+ }
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
@@ -629,6 +635,118 @@ pipeline {
}
}
}
+
+ stage ('yetus jdk17 hadoop3 checks') {
+ agent {
+ node {
+ label 'hbase'
+ }
+ }
+ environment {
+ BASEDIR = "${env.WORKSPACE}/component"
+ TESTS = "${env.DEEP_CHECKS}"
+ OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}"
+ OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}"
+ SET_JAVA_HOME = "/usr/lib/jvm/java-17"
+ // Activates hadoop 3.0 profile in maven runs.
+ HADOOP_PROFILE = '3.0'
+ SKIP_ERRORPRONE = true
+ }
+ steps {
+ // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
+ sh '''#!/usr/bin/env bash
+ set -e
+ rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
+ echo '(x) {color:red}-1 jdk17 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
+ echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
+ '''
+ unstash 'yetus'
+ dir('component') {
+ checkout scm
+ }
+ sh '''#!/usr/bin/env bash
+ set -e
+ rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
+ "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
+ echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
+ ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
+ '''
+ script {
+ def ret = sh(
+ returnStatus: true,
+ script: '''#!/usr/bin/env bash
+ set -e
+ declare -i status=0
+ if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+ echo '(/) {color:green}+1 jdk17 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
+ else
+ echo '(x) {color:red}-1 jdk17 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
+ status=1
+ fi
+ echo "-- For more information [see jdk17 report|${BUILD_URL}JDK17_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
+ exit "${status}"
+ '''
+ )
+ if (ret != 0) {
+ // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
+ // test output. See HBASE-26339 for more details.
+ currentBuild.result = 'UNSTABLE'
+ }
+ }
+ }
+ post {
+ always {
+ stash name: 'jdk17-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
+ junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
+ // zip surefire reports.
+ sh '''#!/bin/bash -e
+ if [ -d "${OUTPUT_DIR}/archiver" ]; then
+ count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
+ if [[ 0 -ne ${count} ]]; then
+ echo "zipping ${count} archived files"
+ zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
+ else
+ echo "No archived files, skipping compressing."
+ fi
+ else
+ echo "No archiver directory, skipping compressing."
+ fi
+ '''
+ sshPublisher(publishers: [
+ sshPublisherDesc(configName: 'Nightlies',
+ transfers: [
+ sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+ sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+ )
+ ]
+ )
+ ])
+ // remove the big test logs zip file, store the nightlies url in test_logs.html
+ sh '''#!/bin/bash -e
+ if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
+ echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
+ rm -rf "${OUTPUT_DIR}/test_logs.zip"
+ python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
+ else
+ echo "No test_logs.zip, skipping"
+ fi
+ '''
+ // Has to be relative to WORKSPACE.
+ archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
+ archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
+ publishHTML target: [
+ allowMissing : true,
+ keepAll : true,
+ alwaysLinkToLastBuild: true,
+ // Has to be relative to WORKSPACE.
+ reportDir : "${env.OUTPUT_DIR_RELATIVE}",
+ reportFiles : 'console-report.html',
+ reportName : 'JDK17 Nightly Build Report (Hadoop3)'
+ ]
+ }
+ }
+ }
+
// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
// TODO (HBASE-23870): replace this with invocation of the release tool
@@ -801,12 +919,14 @@ pipeline {
unstash 'jdk8-hadoop2-result'
unstash 'jdk8-hadoop3-result'
unstash 'jdk11-hadoop3-result'
+ unstash 'jdk17-hadoop3-result'
unstash 'srctarball-result'
sh "printenv"
def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
+ "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/commentfile",
'output-srctarball/commentfile',
'output-integration/commentfile']
echo env.BRANCH_NAME
@@ -876,3 +996,12 @@ List<String> getJirasToComment(CharSequence source, List<String> seen) {
}
return seen
}
+@NonCPS
+String getJavaHomeForYetusGeneralCheck(String branchName) {
+ // for 2.x, build with java 11, for 3.x, build with java 17
+ if (branchName.indexOf("branch-2") >=0) {
+ return "/usr/lib/jvm/java-11";
+ } else {
+ return "/usr/lib/jvm/java-17"
+ }
+}
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 67aa2d1d168..25eee1463c1 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -178,7 +178,7 @@ function personality_modules
# If we have HADOOP_PROFILE specified and we're on branch-2.x, pass along
# the hadoop.profile system property. Ensures that Hadoop2 and Hadoop3
# logic is not both activated within Maven.
- if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = branch-2* ]] ; then
+ if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" == *"branch-2"* ]] ; then
extra="${extra} -Dhadoop.profile=${HADOOP_PROFILE}"
fi
@@ -490,7 +490,7 @@ function shadedjars_rebuild
# If we have HADOOP_PROFILE specified and we're on branch-2.x, pass along
# the hadoop.profile system property. Ensures that Hadoop2 and Hadoop3
# logic is not both activated within Maven.
- if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = branch-2* ]] ; then
+ if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = *"branch-2"* ]] ; then
maven_args+=("-Dhadoop.profile=${HADOOP_PROFILE}")
fi
@@ -580,14 +580,7 @@ function hadoopcheck_rebuild
# All supported Hadoop versions that we want to test the compilation with
# See the Hadoop section on prereqs in the HBase Reference Guide
- if [[ "${PATCH_BRANCH}" = branch-2.4 ]]; then
- yetus_info "Setting Hadoop 2 versions to test based on branch-2.4 rules."
- if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
- hbase_hadoop2_versions="2.10.2"
- else
- hbase_hadoop2_versions="2.10.0 2.10.1 2.10.2"
- fi
- elif [[ "${PATCH_BRANCH}" = branch-2* ]]; then
+ if [[ "${PATCH_BRANCH}" = *"branch-2"* ]]; then
yetus_info "Setting Hadoop 2 versions to test based on branch-2.5+ rules."
hbase_hadoop2_versions="2.10.2"
else
@@ -595,19 +588,19 @@ function hadoopcheck_rebuild
hbase_hadoop2_versions=""
fi
- if [[ "${PATCH_BRANCH}" = branch-2.4 ]]; then
- yetus_info "Setting Hadoop 3 versions to test based on branch-2.4 rules"
+ if [[ "${PATCH_BRANCH}" = *"branch-2.5"* ]]; then
+ yetus_info "Setting Hadoop 3 versions to test based on branch-2.5 rules"
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
- hbase_hadoop3_versions="3.1.4 3.2.4 3.3.6"
+ hbase_hadoop3_versions="3.2.4 3.3.6"
else
- hbase_hadoop3_versions="3.1.1 3.1.2 3.1.3 3.1.4 3.2.0 3.2.1 3.2.2 3.2.3 3.2.4 3.3.0 3.3.1 3.3.2 3.3.3 3.3.4 3.3.5 3.3.6"
+ hbase_hadoop3_versions="3.2.3 3.2.4 3.3.2 3.3.3 3.3.4 3.3.5 3.3.6"
fi
else
- yetus_info "Setting Hadoop 3 versions to test based on branch-2.5+/master/feature branch rules"
+ yetus_info "Setting Hadoop 3 versions to test based on branch-2.6+/master/feature branch rules"
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
- hbase_hadoop3_versions="3.2.4 3.3.6"
+ hbase_hadoop3_versions="3.3.6"
else
- hbase_hadoop3_versions="3.2.3 3.2.4 3.3.2 3.3.3 3.3.4 3.3.5 3.3.6"
+ hbase_hadoop3_versions="3.3.5 3.3.6"
fi
fi
@@ -635,7 +628,7 @@ function hadoopcheck_rebuild
done
hadoop_profile=""
- if [[ "${PATCH_BRANCH}" = branch-2* ]]; then
+ if [[ "${PATCH_BRANCH}" == *"branch-2"* ]]; then
hadoop_profile="-Dhadoop.profile=3.0"
fi
for hadoopver in ${hbase_hadoop3_versions}; do