This is an automated email from the ASF dual-hosted git repository.
stoty pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2 by this push:
new 2c554f3154c HBASE-28906 Run nightly tests with multiple Hadoop 3 versions (#6356) (#6399)
2c554f3154c is described below
commit 2c554f3154c7c8496ef005a5cbf7ba57d1c3e087
Author: Istvan Toth <[email protected]>
AuthorDate: Sun Oct 27 08:16:53 2024 +0100
HBASE-28906 Run nightly tests with multiple Hadoop 3 versions (#6356) (#6399)
includes HBASE-28929 Set hadoop-three.version in Hadoop 3 backwards compatibility tests
includes addendum: workaround for MAPREDUCE-7492
Signed-off-by: Nick Dimiduk <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
(cherry picked from commit 4be7e39e5a7953853ef79652c65160021dfa0105)
---
dev-support/Jenkinsfile | 362 +++++++++++++++------
dev-support/hbase-personality.sh | 24 +-
.../hbase_nightly_pseudo-distributed-test.sh | 5 +-
dev-support/hbase_nightly_yetus.sh | 12 +-
4 files changed, 304 insertions(+), 99 deletions(-)
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index f93408d6fd3..4357bb0d0cc 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -38,6 +38,7 @@ pipeline {
OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
OUTPUT_DIR_RELATIVE_JDK17_HADOOP3 = 'output-jdk17-hadoop3'
+ OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS = 'output-jdk17-hadoop3-backwards'
PROJECT = 'hbase'
PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
@@ -57,6 +58,9 @@ pipeline {
ASF_NIGHTLIES = 'https://nightlies.apache.org'
ASF_NIGHTLIES_BASE_ORI = "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}"
ASF_NIGHTLIES_BASE = "${ASF_NIGHTLIES_BASE_ORI.replaceAll(' ', '%20')}"
+ // These are dependent on the branch
+ HADOOP3_VERSIONS = "3.3.5,3.3.6,3.4.0"
+ HADOOP3_DEFAULT_VERSION = "3.3.5"
}
parameters {
booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
@@ -153,35 +157,47 @@ pipeline {
}
}
stage ('hadoop 3 cache') {
- environment {
- HADOOP3_VERSION="3.3.5"
- }
steps {
- // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
- dir('downloads-hadoop-3') {
- sh '''#!/usr/bin/env bash
- echo "Make sure we have a directory for downloading dependencies: $(pwd)"
+ script {
+ hadoop3_versions = env.HADOOP3_VERSIONS.split(",");
+ env.HADOOP3_VERSIONS_REGEX = "[" + hadoop3_versions.join("|") + "]";
+ for (hadoop3_version in hadoop3_versions) {
+ env.HADOOP3_VERSION = hadoop3_version;
+ echo "env.HADOOP3_VERSION" + env.hadoop3_version;
+ stage ('Hadoop 3 cache inner stage') {
+ // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
+ dir("downloads-hadoop-${HADOOP3_VERSION}") {
+ sh '''#!/usr/bin/env bash
+ echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
- }
- sh '''#!/usr/bin/env bash
- set -e
- echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
- "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
- --working-dir "${WORKSPACE}/downloads-hadoop-3" \
- --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
- --verify-tar-gz \
- "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
- "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
- for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
- echo "Delete stale hadoop 3 cache ${stale}"
- rm -rf $stale
- done
- '''
- stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
- }
- }
- }
- }
+ } //dir
+ sh '''#!/usr/bin/env bash
+ set -e
+ echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
+ "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
+ --working-dir "${WORKSPACE}/downloads-hadoop-${HADOOP3_VERSION}" \
+ --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
+ --verify-tar-gz \
+ "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
+ "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
+ for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
+ echo "Delete stale hadoop 3 cache ${stale}"
+ rm -rf $stale
+ done
+ '''
+ stash name: "hadoop-${HADOOP3_VERSION}", includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
+ script {
+ if (env.HADOOP3_VERSIONS == env.HADOOP3_DEFAULT_VERSION) {
+ stash(name: "hadoop-3", includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz")
+ } //if
+ } //script
+ } //stage ('Hadoop 3 cache inner stage')
+ } //for
+ } //script
+ } //steps
+ } //stage ('hadoop 3 cache') {
+ } //parallel
+ } //stage ('thirdparty installs')
stage ('init health results') {
steps {
// stash with given name for all tests we might run, so that we can unstash all of them even if
@@ -191,6 +207,12 @@ pipeline {
stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
stash name: 'jdk17-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/doesn't-match"
+ script {
+ for (hadoop3_version in hadoop3_versions) {
+ // confusing environment vs Groovy variables
+ stash(name: "jdk17-hadoop3-backwards-result-${hadoop3_version}", allowEmpty: true, includes: "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${hadoop3_version}/doesn't-match")
+ }
+ }
stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
}
}
@@ -748,6 +770,130 @@ pipeline {
}
}
}
+ // If/when we transition to transient runners, we could run every Hadoop check as a matrix job
+ stage ('yetus jdk17 hadoop3 backwards compatibility checks') {
+ agent {
+ node {
+ label 'hbase'
+ }
+ }
+ environment {
+ BASEDIR = "${env.WORKSPACE}/component"
+ TESTS = "${env.DEEP_CHECKS}"
+ SET_JAVA_HOME = "/usr/lib/jvm/java-17"
+ // Activates hadoop 3.0 profile in maven runs.
+ HADOOP_PROFILE = '3.0'
+ // HADOOP_THREE_VERSION is set in script for loop
+ TEST_PROFILE = 'runDevTests'
+ SKIP_ERRORPRONE = true
+ }
+ steps {
+ script {
+ for (hadoop3_version in hadoop3_versions) {
+ //HADOOP_THREE_VERSION is the environment variable name expected by the nightly shell script
+ env.HADOOP_THREE_VERSION = hadoop3_version;
+ env.OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${env.HADOOP_THREE_VERSION}"
+ env.OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${env.HADOOP_THREE_VERSION}"
+ try {
+ stage ('yetus jdk17 hadoop3 backwards compatibility checks inner stage') {
+ // Must do prior to anything else, since if one of them times out we'll stash the commentfile
+ sh '''#!/usr/bin/env bash
+ set -e
+ rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
+ rm -f "${OUTPUT_DIR}/commentfile"
+ '''
+ unstash 'yetus'
+ dir('component') {
+ checkout scm
+ }
+ sh '''#!/usr/bin/env bash
+ set -e
+ rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
+ "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
+ echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
+ ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
+ '''
+ script {
+ def ret = sh(
+ returnStatus: true,
+ script: '''#!/usr/bin/env bash
+ set -e
+ declare -i status=0
+ if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ;
then
+ echo '(/) {color:green}+1 jdk17 hadoop
${HADOOP_THREE_VERSION} backward compatibility checks{color}' >
"${OUTPUT_DIR}/commentfile"
+ else
+ echo '(x) {color:red}-1 jdk17 hadoop
${HADOOP_THREE_VERSION} backward compatibility checks{color}' >
"${OUTPUT_DIR}/commentfile"
+ status=1
+ fi
+ echo "-- For more information [see jdk17
report|${BUILD_URL}JDK17_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >>
"${OUTPUT_DIR}/commentfile"
+ exit "${status}"
+ '''
+ )
+ if (ret != 0) {
+ // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
+ // test output. See HBASE-26339 for more details.
+ currentBuild.result = 'UNSTABLE'
+ }
+ } //script
+ } //stage ('yetus jdk17 hadoop3 backwards compatibility checks inner stage') {
+ } //try
+ finally {
+ stash name: "jdk17-hadoop3-backwards-result-${HADOOP_THREE_VERSION}", includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
+ junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
+ // zip surefire reports.
+ sh '''#!/bin/bash -e
+ if [ ! -f "${OUTPUT_DIR}/commentfile" ]; then
+ echo "(x) {color:red}-1 jdk17 hadoop
${HADOOP_THREE_VERSION} backward compatibility checks{color}"
>"${OUTPUT_DIR}/commentfile"
+ echo "-- Something went wrong running this stage, please
[check relevant console output|${BUILD_URL}/console]." >>
"${OUTPUT_DIR}/commentfile"
+ fi
+ if [ -d "${OUTPUT_DIR}/archiver" ]; then
+ count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
+ if [[ 0 -ne ${count} ]]; then
+ echo "zipping ${count} archived files"
+ zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
+ else
+ echo "No archived files, skipping compressing."
+ fi
+ else
+ echo "No archiver directory, skipping compressing."
+ fi
+ '''
+ sshPublisher(publishers: [
+ sshPublisherDesc(configName: 'Nightlies',
+ transfers: [
+ sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+ sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+ )
+ ]
+ )
+ ])
+ // remove the big test logs zip file, store the nightlies url in test_logs.html
+ sh '''#!/bin/bash -e
+ if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
+ echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving
space"
+ rm -rf "${OUTPUT_DIR}/test_logs.zip"
+ python3 ${BASEDIR}/dev-support/gen_redirect_html.py
"${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
+ else
+ echo "No test_logs.zip, skipping"
+ fi
+ '''
+ // Has to be relative to WORKSPACE.
+ archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
+ archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
+ publishHTML target: [
+ allowMissing : true,
+ keepAll : true,
+ alwaysLinkToLastBuild: true,
+ // Has to be relative to WORKSPACE.
+ reportDir : "${env.OUTPUT_DIR_RELATIVE}",
+ reportFiles : 'console-report.html',
+ reportName : "JDK17 Nightly Build Report (Hadoop ${HADOOP_THREE_VERSION} backwards compatibility)"
+ ]
+ } //finally
+ } // for
+ } //script
+ } //steps
+ } //stage ('yetus jdk17 hadoop3 backwards compatibility checks')
// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
@@ -782,8 +928,7 @@ pipeline {
# remove old hadoop tarballs in workspace
rm -rf hadoop-2*.tar.gz
rm -rf hadoop-3*.tar.gz
- echo "(x) {color:red}-1 source release artifact{color}\n--
Something went wrong with this stage, [check relevant console
output|${BUILD_URL}/console]." >output-srctarball/commentfile
- echo "(x) {color:red}-1 client integration test{color}\n--
Something went wrong with this stage, [check relevant console
output|${BUILD_URL}/console]." >output-integration/commentfile
+ rm -f "output-integration/commentfile"
'''
sh '''#!/usr/bin/env bash
set -e
@@ -855,70 +1000,90 @@ pipeline {
echo "(x) {color:red}-1 client integration
test{color}\n--Failed when running client tests on top of Hadoop 2. [see log
for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that
this means we didn't run on Hadoop 3)" >output-integration/commentfile
exit 2
fi
+ echo "(/) {color:green}+1 client integration test for HBase 2
{color}" >output-integration/commentfile
else
echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
fi
'''
- unstash 'hadoop-3'
- sh '''#!/bin/bash -e
- echo "Attempting to use run an instance on top of Hadoop 3."
- artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
- tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
- # we need to patch some files otherwise minicluster will fail to start, see MAPREDUCE-7471
- ${BASEDIR}/dev-support/patch-hadoop3.sh hadoop-3
- hbase_install_dir="hbase-install"
- hbase_client_dir="hbase-client"
- if [ -d "hbase-hadoop3-install" ]; then
- echo "run hadoop3 client integration test against hbase
hadoop3 binaries"
- hbase_install_dir="hbase-hadoop3-install"
- hbase_client_dir="hbase-hadoop3-client"
- fi
- docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
- docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
- -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
- -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
- --workdir=/hbase hbase-integration-test \
- component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
- --single-process \
- --working-dir output-integration/hadoop-3 \
- --hbase-client-install ${hbase_client_dir} \
- ${hbase_install_dir} \
- hadoop-3/bin/hadoop \
- hadoop-3/share/hadoop/yarn/timelineservice \
- hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
- hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
- hadoop-3/bin/mapred \
- >output-integration/hadoop-3.log 2>&1
- if [ $? -ne 0 ]; then
- echo "(x) {color:red}-1 client integration
test{color}\n--Failed when running client tests on top of Hadoop 3. [see log
for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that
this means we didn't check the Hadoop 3 shaded client)"
>output-integration/commentfile
- exit 2
- fi
- echo "Attempting to use run an instance on top of Hadoop 3,
relying on the Hadoop client artifacts for the example client program."
- docker run --rm -v "${WORKSPACE}":/hbase -v
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
- -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
- -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED"
\
- --workdir=/hbase hbase-integration-test \
-
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
- --single-process \
- --hadoop-client-classpath
hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar
\
- --working-dir output-integration/hadoop-3-shaded \
- --hbase-client-install ${hbase_client_dir} \
- ${hbase_install_dir} \
- hadoop-3/bin/hadoop \
- hadoop-3/share/hadoop/yarn/timelineservice \
- hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
- hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
- hadoop-3/bin/mapred \
- >output-integration/hadoop-3-shaded.log 2>&1
- if [ $? -ne 0 ]; then
- echo "(x) {color:red}-1 client integration
test{color}\n--Failed when running client tests on top of Hadoop 3 using
Hadoop's shaded client. [see log for
details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]."
>output-integration/commentfile
- exit 2
- fi
- echo "(/) {color:green}+1 client integration test{color}"
>output-integration/commentfile
- '''
- }
+ script {
+ for (hadoop3_version in hadoop3_versions) {
+ env.HADOOP3_VERSION = hadoop3_version;
+ echo "env.HADOOP3_VERSION" + env.hadoop3_version;
+ stage ("packaging and integration Hadoop 3 inner stage ") {
+ unstash "hadoop-" + env.HADOOP3_VERSION
+ sh '''#!/bin/bash -e
+ echo "Attempting to use run an instance on top of Hadoop
${HADOOP3_VERSION}."
+ # Clean up any previous tested Hadoop3 files before
unpacking the current one
+ rm -rf hadoop-3/*
+ # Create working dir
+ rm -rf "output-integration/hadoop-${HADOOP3_VERSION}" &&
mkdir "output-integration/hadoop-${HADOOP3_VERSION}"
+ rm -rf
"output-integration/hadoop-${HADOOP3_VERSION}-shaded" && mkdir
"output-integration/hadoop-${HADOOP3_VERSION}-shaded"
+ artifact=$(ls -1
"${WORKSPACE}"/hadoop-${HADOOP3_VERSION}-bin.tar.gz | head -n 1)
+ tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
+ # we need to patch some files otherwise minicluster will
fail to start, see MAPREDUCE-7471
+ ${BASEDIR}/dev-support/patch-hadoop3.sh hadoop-3
+ hbase_install_dir="hbase-install"
+ hbase_client_dir="hbase-client"
+ if [ -d "hbase-hadoop3-install" ]; then
+ echo "run hadoop3 client integration test against hbase
hadoop3 binaries"
+ hbase_install_dir="hbase-hadoop3-install"
+ hbase_client_dir="hbase-hadoop3-client"
+ fi
+ docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
+ docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+ -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
+ -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
+ --workdir=/hbase hbase-integration-test \
+ component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
+ --single-process \
+ --working-dir output-integration/hadoop-${HADOOP3_VERSION} \
+ --hbase-client-install ${hbase_client_dir} \
+ ${hbase_install_dir} \
+ hadoop-3/bin/hadoop \
+ hadoop-3/share/hadoop/yarn/timelineservice \
+ hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+ hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+ hadoop-3/bin/mapred \
+ >output-integration/hadoop-${HADOOP3_VERSION}.log 2>&1
+ if [ $? -ne 0 ]; then
+ echo "(x) {color:red}-1 client integration
test{color}\n--Failed when running client tests on top of Hadoop
${HADOOP3_VERSION}. [see log for
details|${BUILD_URL}/artifact/output-integration/hadoop-${HADOOP3_VERSION}.log].
(note that this means we didn't check the Hadoop ${HADOOP3_VERSION} shaded
client)" >> output-integration/commentfile
+ exit 2
+ fi
+ echo "Attempting to use run an instance on top of Hadoop
${HADOOP3_VERSION}, relying on the Hadoop client artifacts for the example
client program."
+ docker run --rm -v "${WORKSPACE}":/hbase -v
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+ -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17"
\
+ -e HADOOP_OPTS="--add-opens
java.base/java.lang=ALL-UNNAMED" \
+ --workdir=/hbase hbase-integration-test \
+
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
+ --single-process \
+ --hadoop-client-classpath
hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar
\
+ --working-dir
output-integration/hadoop-${HADOOP3_VERSION}-shaded \
+ --hbase-client-install ${hbase_client_dir} \
+ ${hbase_install_dir} \
+ hadoop-3/bin/hadoop \
+ hadoop-3/share/hadoop/yarn/timelineservice \
+ hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+ hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+ hadoop-3/bin/mapred \
+ >output-integration/hadoop-${HADOOP3_VERSION}-shaded.log 2>&1
+ if [ $? -ne 0 ]; then
+ echo "(x) {color:red}-1 client integration
test{color}\n--Failed when running client tests on top of Hadoop
${HADOOP3_VERSION} using Hadoop's shaded client. [see log for
details|${BUILD_URL}/artifact/output-integration/hadoop-${HADOOP3_VERSION}-shaded.log]."
>> output-integration/commentfile
+ exit 2
+ fi
+ echo "(/) {color:green}+1 client integration test for
${HADOOP3_VERSION} {color}" >> output-integration/commentfile
+ '''
+ } //stage ("packaging and integration Hadoop 3 inner stage ")
+ } //for
+ } // script
+ } //steps
post {
always {
+ sh '''#!/bin/bash -e
+ if [ ! -f "output-integration/commentfile" ]; then
+ echo "(x) {color:red}-1 source release artifact{color}\n--
Something went wrong with this stage, [check relevant console
output|${BUILD_URL}/console]." >output-srctarball/commentfile
+ echo "(x) {color:red}-1 client integration test{color}\n--
Something went wrong with this stage, [check relevant console
output|${BUILD_URL}/console]." >output-integration/commentfile
+ fi
+ '''
stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
@@ -944,12 +1109,12 @@ pipeline {
archiveArtifacts artifacts: 'output-srctarball/**/*'
archiveArtifacts artifacts: 'output-integration/*'
archiveArtifacts artifacts: 'output-integration/**/*'
- }
- }
- }
- }
- }
- }
+ } //always
+ } //post
+ } //stage packaging
+ } // parallel
+ } //stage:_health checks
+ } //stages
post {
always {
script {
@@ -963,6 +1128,7 @@ pipeline {
rm -rf ${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}
rm -rf ${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}
rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}
+ rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-*
rm -rf output-srctarball
rm -rf output-integration
'''
@@ -977,9 +1143,13 @@ pipeline {
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
- "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/commentfile",
- 'output-srctarball/commentfile',
- 'output-integration/commentfile']
+ "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/commentfile"]
+ for (hadoop3_version in hadoop3_versions) {
+ unstash("jdk17-hadoop3-backwards-result-${hadoop3_version}")
+ results.add("${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${hadoop3_version}/commentfile")
+ }
+ results.add('output-srctarball/commentfile')
+ results.add('output-integration/commentfile')
echo env.BRANCH_NAME
echo env.BUILD_URL
echo currentBuild.result
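For orientation, the per-version cache stages in the Jenkinsfile hunk above boil down to roughly the following shell loop (paths, flags and the version list are taken from the diff; the real iteration happens in Groovy, so treat this as a sketch only):

    # Sketch: what the 'hadoop 3 cache' stage effectively runs per version
    for HADOOP3_VERSION in 3.3.5 3.3.6 3.4.0; do
      "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
        --working-dir "${WORKSPACE}/downloads-hadoop-${HADOOP3_VERSION}" \
        --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
        --verify-tar-gz \
        "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
        "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
      # each tarball is then stashed as "hadoop-${HADOOP3_VERSION}"
    done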
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index a6a3d95e937..46f08276c65 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -111,6 +111,14 @@ function personality_parse_args
delete_parameter "${i}"
HADOOP_PROFILE=${i#*=}
;;
+ --hadoop-three-version=*)
+ delete_parameter "${i}"
+ HADOOP_THREE_VERSION=${i#*=}
+ ;;
+ --test-profile=*)
+ delete_parameter "${i}"
+ TEST_PROFILE=${i#*=}
+ ;;
--skip-errorprone)
delete_parameter "${i}"
SKIP_ERRORPRONE=true
@@ -182,6 +190,11 @@ function personality_modules
extra="${extra} -Dhadoop.profile=${HADOOP_PROFILE}"
fi
+ # If we have HADOOP_THREE_VERSION specified, pass it along in the hadoop-three.version system property.
+ if [[ -n "${HADOOP_THREE_VERSION}" ]] ; then
+ extra="${extra} -Dhadoop-three.version=${HADOOP_THREE_VERSION}"
+ fi
+
# BUILDMODE value is 'full' when there is no patch to be tested, and we are running checks on
# full source code instead. In this case, do full compiles, tests, etc instead of per
# module.
@@ -233,8 +246,15 @@ function personality_modules
# tests respectively.
if [[ ${testtype} == unit ]]; then
local tests_arg=""
+
+ if [ -n "${TEST_PROFILE}" ]; then
+ extra="${extra} -P${TEST_PROFILE}"
+ else
+ extra="${extra} -PrunAllTests"
+ fi
+
get_include_exclude_tests_arg tests_arg
- extra="${extra} -PrunAllTests ${tests_arg}"
+ extra="${extra} ${tests_arg}"
# Inject the jenkins build-id for our surefire invocations
# Used by zombie detection stuff, even though we're not including that yet.
@@ -589,6 +609,7 @@ function hadoopcheck_rebuild
fi
if [[ "${PATCH_BRANCH}" = *"branch-2.5"* ]]; then
+ # TODO remove this on non 2.5 branches ?
yetus_info "Setting Hadoop 3 versions to test based on branch-2.5 rules"
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop3_versions="3.2.4 3.3.6 3.4.0"
@@ -597,6 +618,7 @@ function hadoopcheck_rebuild
fi
else
yetus_info "Setting Hadoop 3 versions to test based on branch-2.6+/master/feature branch rules"
+ # Isn't running these tests with the default Hadoop version redundant?
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop3_versions="3.3.6 3.4.0"
else
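To make the new personality knobs concrete: with HADOOP_PROFILE=3.0, HADOOP_THREE_VERSION=3.3.6 and TEST_PROFILE=runDevTests (the values the Jenkinsfile stage sets; shown here only as an assumed example), personality_modules ends up adding roughly these extras to the maven command line:

    # Sketch under the assumed settings above; goals and other flags omitted
    #   mvn ... -Dhadoop.profile=3.0 -Dhadoop-three.version=3.3.6 -PrunDevTests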
diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh b/dev-support/hbase_nightly_pseudo-distributed-test.sh
index 9292222cf52..bbe5a70c664 100755
--- a/dev-support/hbase_nightly_pseudo-distributed-test.sh
+++ b/dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -281,7 +281,10 @@ trap cleanup EXIT SIGQUIT
echo "Starting up Hadoop"
-if [ "${hadoop_version%.*.*}" -gt 2 ]; then
+if [ "${hadoop_version%.*.*.*}" = "3.4.0" ]; then
+ # writeDetails is broken in 3.4.0. See MAPREDUCE-7492
+ "${mapred_exec}" minicluster -format -writeConfig
"${working_dir}/hbase-conf/core-site.xml"
>"${working_dir}/hadoop_cluster_command.out"
2>"${working_dir}/hadoop_cluster_command.err" &
+elif [ "${hadoop_version%.*.*}" -gt 2 ]; then
"${mapred_exec}" minicluster -format -writeConfig
"${working_dir}/hbase-conf/core-site.xml" -writeDetails
"${working_dir}/hadoop_cluster_info.json"
>"${working_dir}/hadoop_cluster_command.out"
2>"${working_dir}/hadoop_cluster_command.err" &
else
HADOOP_CLASSPATH="${timeline_service_dir}/*:${timeline_service_dir}/lib/*:${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
diff --git a/dev-support/hbase_nightly_yetus.sh b/dev-support/hbase_nightly_yetus.sh
index 4cee1efde56..0f16827bfdd 100755
--- a/dev-support/hbase_nightly_yetus.sh
+++ b/dev-support/hbase_nightly_yetus.sh
@@ -74,7 +74,7 @@ if [[ -n "${INCLUDE_TESTS_URL}" ]]; then
YETUS_ARGS=("--include-tests-url=${INCLUDE_TESTS_URL}" "${YETUS_ARGS[@]}")
fi
-# For testing with specific hadoop version. Activates corresponding profile in maven runs.
+# For testing with specific hadoop major version. Activates corresponding profile in maven runs.
if [[ -n "${HADOOP_PROFILE}" ]]; then
# Master has only Hadoop3 support. We don't need to activate any profile.
# The Jenkinsfile should not attempt to run any Hadoop2 tests.
@@ -83,6 +83,16 @@ if [[ -n "${HADOOP_PROFILE}" ]]; then
fi
fi
+# For testing with a specific hadoop 3 version. Passed through to maven runs as the hadoop-three.version property.
+if [[ -n "${HADOOP_THREE_VERSION}" ]]; then
+ YETUS_ARGS=("--hadoop-three-version=${HADOOP_THREE_VERSION}" "${YETUS_ARGS[@]}")
+fi
+
+if [[ -n "${TEST_PROFILE}" ]]; then
+ # i.e. runAllTests / runDevTests
+ YETUS_ARGS=("--test-profile=${TEST_PROFILE}" "${YETUS_ARGS[@]}")
+fi
+
if [[ "${SKIP_ERRORPRONE}" = "true" ]]; then
YETUS_ARGS=("--skip-errorprone" "${YETUS_ARGS[@]}")
fi
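Putting the pieces together, the new backwards-compatibility stage drives this script by exporting the new variables before calling it; roughly as follows (the version and profile values are illustrative, matching the branch defaults set in the Jenkinsfile):

    # Sketch: how the Jenkinsfile stage invokes this script per Hadoop 3 version
    HADOOP_PROFILE=3.0 \
    HADOOP_THREE_VERSION=3.3.6 \
    TEST_PROFILE=runDevTests \
      ./dev-support/hbase_nightly_yetus.sh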