This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-29930
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/HBASE-29930 by this push:
     new d8528cc04ea fix
d8528cc04ea is described below

commit d8528cc04ea323b77f65929909ae9e208ac7842b
Author: Duo Zhang <[email protected]>
AuthorDate: Sat Feb 28 15:36:40 2026 +0800

    fix
---
 .../integration-test/integration-test.Jenkinsfile  | 86 ++++++++++++++--------
 1 file changed, 55 insertions(+), 31 deletions(-)

diff --git a/dev-support/integration-test/integration-test.Jenkinsfile b/dev-support/integration-test/integration-test.Jenkinsfile
index ec215ace464..ac95c894dec 100644
--- a/dev-support/integration-test/integration-test.Jenkinsfile
+++ b/dev-support/integration-test/integration-test.Jenkinsfile
@@ -23,41 +23,65 @@ pipeline {
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
   }
   stages {
-    stage ('thirdparty installs') {
+    // This is meant to mimic what a release manager will do to create RCs.
+    // See http://hbase.apache.org/book.html#maven.release
+    // TODO (HBASE-23870): replace this with invocation of the release tool
+    stage ('packaging test') {
+      environment {
+        BASEDIR = "${env.WORKSPACE}/component"
+      }
       steps {
         dir('component') {
           checkout scm
         }
-        script {
-          for (hadoop_version in getHadoopVersions(env.HADOOP2_VERSIONS, env.HADOOP3_VERSIONS)) {
-            echo "Download hadoop-" + hadoop_version
-            env.HADOOP_VERSION = hadoop_version
-            stage ('hadoop cache inner stage') {
-              dir("downloads-hadoop-${HADOOP_VERSION}") {
-                sh '''#!/usr/bin/env bash
-                  echo "Make sure we have a directory for downloading dependencies: $(pwd)"
-                '''
-              } //dir
-              sh '''#!/usr/bin/env bash
-                set -e
-                echo "Ensure we have a copy of Hadoop ${HADOOP_VERSION}"
-                "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
-                  --working-dir "${WORKSPACE}/downloads-hadoop-${HADOOP_VERSION}" \
-                  --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
-                  --verify-tar-gz \
-                  "${WORKSPACE}/hadoop-${HADOOP_VERSION}-bin.tar.gz" \
-                  "hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz"
-                for stale in $(ls -1 "${WORKSPACE}"/hadoop-*.tar.gz | grep -v ${HADOOP_VERSION}); do
-                  echo "Delete stale hadoop cache ${stale}"
-                  rm -rf $stale
-                done
-              '''
-              stash name: "hadoop-${HADOOP3_VERSION}", includes: "hadoop-${HADOOP_VERSION}-bin.tar.gz"
-            } // hadoop cache inner stage
-          } // for
-        } // script
-      } // steps
-    } // thirdparty installs
+        sh '''#!/bin/bash -e
+          echo "Setting up directories"
+          rm -rf "output-srctarball" && mkdir "output-srctarball"
+          rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
+          rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
+          rm -rf ".m2-for-src" && mkdir ".m2-for-src"
+        '''
+        sh '''#!/usr/bin/env bash
+          set -e
+          rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
+          "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
+          echo "got the following saved stats in 'output-srctarball/machine'"
+          ls -lh "output-srctarball/machine"
+        '''
+        sh '''#!/bin/bash -e
+          echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
+          docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
+          docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+            -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" --workdir=/hbase hbase-integration-test \
+            "component/dev-support/hbase_nightly_source-artifact.sh" \
+            --intermediate-file-dir output-srctarball \
+            --unpack-temp-dir unpacked_src_tarball \
+            --maven-m2-initial .m2-for-repo \
+            --maven-m2-src-build .m2-for-src \
+            --clean-source-checkout \
+            component
+          if [ $? -eq 0 ]; then
+            echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
+          else
+            echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
+            exit 1
+          fi
+        '''
+        echo "make sure we have proper hbase tarballs under hbase-assembly"
+        sh '''#!/bin/bash -e
+          if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v hadoop3 | wc -l) ]; then
+            echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
+          exit 1
+        fi
+        if [[ "${BRANCH_NAME}" == *"branch-2"* ]]; then
+          if [ 2 -eq $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
+             echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected hadoop3 binaries.' >>output-srctarball/commentfile
+          fi
+        fi
+      '''
+      stash name: 'hbase-install', includes: "${env.WORKSPACE}/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz"
+    }
+  }
   } // stages
   post {
     always {

Reply via email to