This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-29914
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit f9246cfc04d88379b8b534e805c8fd576f27358e
Author: Duo Zhang <[email protected]>
AuthorDate: Mon Feb 23 12:02:35 2026 +0800

    HBASE-29914 Client integration tests fails with 'mvn not found'
---
 .../hbase_nightly_pseudo-distributed-test.sh       |  4 +-
 dev-support/hbase_nightly_source-artifact.sh       | 87 +++++++++++++++++-----
 dev-support/patch-hadoop3.sh                       | 24 ++++++
 3 files changed, 96 insertions(+), 19 deletions(-)

diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh b/dev-support/hbase_nightly_pseudo-distributed-test.sh
index 923341ab43e..51905775dda 100755
--- a/dev-support/hbase_nightly_pseudo-distributed-test.sh
+++ b/dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -512,11 +512,11 @@ public class HBaseClientReadWriteExample {
 }
 EOF
 redirect_and_run "${working_dir}/hbase-shaded-client-compile" \
-    javac -cp "${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hadoop_jars}" "${working_dir}/HBaseClientReadWriteExample.java"
+    $JAVA_HOME/bin/javac -cp "${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hadoop_jars}" "${working_dir}/HBaseClientReadWriteExample.java"
 echo "Running shaded client example. It'll fetch the set of regions, round-trip them to a file in HDFS, then write them one-per-row into the test table."
 # The order of classpath entries here is important. if we're using non-shaded Hadoop 3 / 2.9.0 jars, we have to work around YARN-2190.
 redirect_and_run "${working_dir}/hbase-shaded-client-example" \
-    java -cp "${working_dir}/hbase-conf/:${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hbase_dep_classpath}:${working_dir}:${hadoop_jars}" HBaseClientReadWriteExample
+    $JAVA_HOME/bin/java -cp "${working_dir}/hbase-conf/:${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hbase_dep_classpath}:${working_dir}:${hadoop_jars}" HBaseClientReadWriteExample
 
 echo "Checking on results of example program."
 "${hadoop_exec}" --config "${working_dir}/hbase-conf/" fs -copyToLocal "example-region-listing.data" "${working_dir}/example-region-listing.data"
diff --git a/dev-support/hbase_nightly_source-artifact.sh b/dev-support/hbase_nightly_source-artifact.sh
index 5d9902e5f04..372c39b8f88 100755
--- a/dev-support/hbase_nightly_source-artifact.sh
+++ b/dev-support/hbase_nightly_source-artifact.sh
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-set -e
 function usage {
   echo "Usage: ${0} [options] /path/to/component/checkout"
   echo ""
@@ -34,6 +33,11 @@ function usage {
  echo "                                          a git checkout, including ignored files."
   exit 1
 }
+
+MVN="mvn"
+if ! command -v mvn &>/dev/null; then
+  MVN=$MAVEN_HOME/bin/mvn
+fi
 # if no args specified, show usage
 if [ $# -lt 1 ]; then
   usage
@@ -125,7 +129,7 @@ fi
 # See http://hbase.apache.org/book.html#maven.release
 
 echo "Maven details, in case our JDK doesn't match expectations:"
-mvn --version --offline | tee "${working_dir}/maven_version"
+${MVN} --version --offline | tee "${working_dir}/maven_version"
 
 echo "Do a clean building of the source artifact using code in ${component_dir}"
 cd "${component_dir}"
@@ -170,20 +174,69 @@ else
   echo "Everything looks as expected."
 fi
 
+function get_hadoop3_version {
+  local version="$1"
+  if [[ "${version}" =~ -SNAPSHOT$ ]]; then
+    echo "${version/-SNAPSHOT/-hadoop3-SNAPSHOT}"
+  else
+    echo "${version}-hadoop3"
+  fi
+}
+
+function build_tarball {
+  local build_hadoop3=$1
+  local mvn_extra_args=""
+  local build_log="srctarball_install.log"
+  local tarball_glob="hbase-*-bin.tar.gz"
+  if [ $build_hadoop3 -ne 0 ]; then
+    local version=$(${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:evaluate -Dexpression=project.version -q -DforceStdout)
+    local hadoop3_version=$(get_hadoop3_version $version)
+    mvn_extra_args="-Drevision=${hadoop3_version} -Dhadoop.profile=3.0"
+    build_log="hadoop3_srctarball_install.log"
+    tarball_glob="hbase-*-hadoop3-*-bin.tar.gz"
+    echo "Follow the ref guide section on making a RC: Step 8 Build the hadoop3 binary tarball."
+  else
+    echo "Follow the ref guide section on making a RC: Step 7 Build the binary tarball."
+  fi
+  if ${MVN} --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" ${mvn_extra_args} clean install \
+    assembly:single >"${working_dir}/${build_log}" 2>&1; then
+    for artifact in "${unpack_dir}"/hbase-assembly/target/${tarball_glob}; do
+      if [ -f "${artifact}" ]; then
+        # TODO check the layout of the binary artifact we just made.
+        echo "Building a binary tarball from the source tarball succeeded."
+        return 0
+      fi
+    done
+  fi
+
+  echo "Building a binary tarball from the source tarball failed. see ${working_dir}/${build_log} for details."
+  # Copy up the rat.txt to the working dir so available in build archive in case rat complaints.
+  # rat.txt can be under any module target dir... copy them all up renaming them to include parent dir as we go.
+  find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v "$NAME" "${working_dir}/${NAME//\//_}"; done
+  return 1
+}
+
 cd "${unpack_dir}"
-echo "Follow the ref guide section on making a RC: Step 8 Build the binary tarball."
-if mvn --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" clean install \
-    assembly:single >"${working_dir}/srctarball_install.log" 2>&1; then
-  for artifact in "${unpack_dir}"/hbase-assembly/target/hbase-*-bin.tar.gz; do
-    if [ -f "${artifact}" ]; then
-      # TODO check the layout of the binary artifact we just made.
-      echo "Building a binary tarball from the source tarball succeeded."
-      exit 0
-    fi
-  done
+
+${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:active-profiles | grep -q hadoop-3.0
+if [ $? -ne 0 ]; then
+  echo "The hadoop-3.0 profile is not activated by default, build a default tarball first."
+  # use java 8 to build with hadoop2
+  JAVA_HOME="/usr/lib/jvm/java-8" build_tarball 0
+  if [ $? -ne 0 ]; then
+    exit 1
+  fi
+
+  # move the previous tarballs out, so it will not be cleaned while building against hadoop3
+  mv "${unpack_dir}"/hbase-assembly/target/hbase-*-bin.tar.gz "${unpack_dir}"/
+  echo "build a hadoop3 tarball."
+  build_tarball 1
+  if [ $? -ne 0 ]; then
+    exit 1
+  fi
+  # move tarballs back
+  mv "${unpack_dir}"/hbase-*-bin.tar.gz "${unpack_dir}"/hbase-assembly/target/
+else
+  echo "The hadoop-3.0 profile is activated by default, build a default tarball."
+  build_tarball 0
 fi
-echo "Building a binary tarball from the source tarball failed. see ${working_dir}/srctarball_install.log for details."
-# Copy up the rat.txt to the working dir so available in build archive in case rat complaints.
-# rat.txt can be under any module target dir... copy them all up renaming them to include parent dir as we go.
-find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v "$NAME" "${working_dir}/${NAME//\//_}"; done
-exit 1
-exit 1
diff --git a/dev-support/patch-hadoop3.sh b/dev-support/patch-hadoop3.sh
new file mode 100755
index 00000000000..fed5a535bcd
--- /dev/null
+++ b/dev-support/patch-hadoop3.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+##
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
+hadoop_dir=$1
+
+sed -i "s/HADOOP_TOOLS_DIR=\${HADOOP_TOOLS_DIR:-\"share\/hadoop\/tools\"}/HADOOP_TOOLS_DIR=\${HADOOP_TOOLS_DIR:-\"\$HADOOP_TOOLS_HOME\/share\/hadoop\/tools\"}/g" "$hadoop_dir/libexec/hadoop-functions.sh"
+sed -i "/hadoop_add_classpath \"\${junitjar}\"/a mockitojar=\$(echo \"\${HADOOP_TOOLS_LIB_JARS_DIR}\"\/mockito-core-[0-9]*.jar)\nhadoop_add_classpath \"\${mockitojar}\"" "$hadoop_dir/bin/mapred"
+curl https://repo1.maven.org/maven2/org/mockito/mockito-core/2.28.2/mockito-core-2.28.2.jar -o "$hadoop_dir/share/hadoop/tools/lib/mockito-core-2.28.2.jar"

Reply via email to