This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-29930
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/HBASE-29930 by this push:
     new 2faadff7fb9 fix
2faadff7fb9 is described below

commit 2faadff7fb95469336ee34faa4d5655f5f8f0010
Author: Duo Zhang <[email protected]>
AuthorDate: Sat Feb 28 19:38:02 2026 +0800

    fix
---
 .../integration-test/integration-test.Jenkinsfile  | 46 ++--------------------
 1 file changed, 4 insertions(+), 42 deletions(-)

diff --git a/dev-support/integration-test/integration-test.Jenkinsfile b/dev-support/integration-test/integration-test.Jenkinsfile
index 9b9286600ab..c5daae18c93 100644
--- a/dev-support/integration-test/integration-test.Jenkinsfile
+++ b/dev-support/integration-test/integration-test.Jenkinsfile
@@ -111,6 +111,10 @@ pipeline {
            values getHadoopVersions(env.HADOOP2_VERSIONS, env.HADOOP3_VERSIONS)
           }
         }
+        environment {
+          BASEDIR = "${env.WORKSPACE}/component"
+          OUTPUT_DIR = "output-integration-hadoop-${env.HADOOP_VERSION}"
+        }
         stages {
           stage('scm-checkout') {
             steps {
@@ -131,48 +135,6 @@ pipeline {
               }
             }
           }
-          stage('download hadoop') {
-            steps {
-              dir("downloads-hadoop") {
-                sh '''#!/bin/bash -e
-                  echo "Make sure we have a directory for downloading dependencies: $(pwd)"
-                '''
-                sh '''#!/bin/bash -e
-                  echo "Ensure we have a copy of Hadoop ${HADOOP_VERSION}"
-                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
-                    --working-dir "${WORKSPACE}/downloads-hadoop" \
-                    --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
-                    --verify-tar-gz \
-                    "${WORKSPACE}/hadoop-${HADOOP_VERSION}-bin.tar.gz" \
-                    "hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz"
-                  for stale in $(ls -1 "${WORKSPACE}"/hadoop-*.tar.gz | grep -v ${HADOOP_VERSION}); do
-                    echo "Delete stale hadoop cache ${stale}"
-                    rm -rf $stale
-                  done
-                '''
-              } // dir
-            } // steps
-          } // download hadoop
-          stage('integration test ') {
-            steps {
-              unstash 'hbase-install'
-              sh'''#!/bin/bash -e
-                install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v client-bin | grep -v hadoop3)
-                tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
-                client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz | grep -v hadoop3)
-                tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
-                if [ 2 -eq $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
-                  echo "hadoop3 artifacts available, unpacking the hbase hadoop3 bin tarball into 'hbase-hadoop3-install' and the client hadoop3 tarball into 'hbase-hadoop3-client'"
-                  mkdir hbase-hadoop3-install
-                  mkdir hbase-hadoop3-client
-                  hadoop3_install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | grep -v client-bin)
-                  tar --strip-component=1 -xzf "${hadoop3_install_artifact}" -C "hbase-hadoop3-install"
-                  hadoop3_client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-client-bin.tar.gz)
-                  tar --strip-component=1 -xzf "${hadoop3_client_artifact}" -C "hbase-hadoop3-client"
-                fi
-              '''
-            } // steps
-          } // integration test
         } // stages
       } // matrix
     } // integration test matrix

Reply via email to