This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch release-0.12.3
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/release-0.12.3 by this push:
new 2a9a7193d80 [MINOR] Release 0.12.3 script update (#8588)
2a9a7193d80 is described below
commit 2a9a7193d80d5aecba0a360aedbb5ee7f0cf8e70
Author: Sivabalan Narayanan <[email protected]>
AuthorDate: Fri Mar 1 11:26:23 2024 -0800
[MINOR] Release 0.12.3 script update (#8588)
---
scripts/release/deploy_staging_jars.sh | 83 +++++++++++++++---------------
scripts/release/validate_staged_bundles.sh | 52 +++++++------------
2 files changed, 61 insertions(+), 74 deletions(-)
diff --git a/scripts/release/deploy_staging_jars.sh b/scripts/release/deploy_staging_jars.sh
index 3d6710ff592..6a10f7ed4de 100755
--- a/scripts/release/deploy_staging_jars.sh
+++ b/scripts/release/deploy_staging_jars.sh
@@ -36,42 +36,51 @@ if [ "$#" -gt "1" ]; then
exit 1
fi
-BUNDLE_MODULES=$(find -s packaging -name 'hudi-*-bundle' -type d)
-BUNDLE_MODULES_EXCLUDED="-${BUNDLE_MODULES//$'\n'/,-}"
-BUNDLE_MODULES_EXCLUDED="-packaging/hudi-aws-bundle,-packaging/hudi-datahub-sync-bundle,-packaging/hudi-flink-bundle,-packaging/hudi-gcp-bundle,-packaging/hudi-integ-test-bundle,-packaging/hudi-kafka-connect-bundle"
-
declare -a ALL_VERSION_OPTS=(
-# upload all module jars and bundle jars
-"-Dscala-2.11 -Dspark2.4 -pl $BUNDLE_MODULES_EXCLUDED"
-"-Dscala-2.12 -Dspark2.4 -pl $BUNDLE_MODULES_EXCLUDED"
-"-Dscala-2.12 -Dspark3.3 -pl $BUNDLE_MODULES_EXCLUDED"
-"-Dscala-2.12 -Dspark3.2 -pl $BUNDLE_MODULES_EXCLUDED"
-"-Dscala-2.12 -Dspark3.1" # this profile goes last in this section to ensure bundles use avro 1.8
-
-# spark bundles
-"-Dscala-2.11 -Dspark2.4 -pl packaging/hudi-spark-bundle -am"
+# Upload Spark specific modules and bundle jars
+# For Spark 2.4, Scala 2.11:
+# hudi-spark-common_2.11
+# hudi-spark_2.11
+# hudi-spark2_2.11
+# hudi-utilities_2.11
+# hudi-spark2.4-bundle_2.11
+# hudi-utilities-bundle_2.11
+# hudi-utilities-slim-bundle_2.11
+"-Dscala-2.11 -Dspark2.4 -pl hudi-spark-datasource/hudi-spark-common,hudi-spark-datasource/hudi-spark2,hudi-spark-datasource/hudi-spark,hudi-utilities,packaging/hudi-spark-bundle,packaging/hudi-utilities-bundle,packaging/hudi-utilities-slim-bundle -am"
+# For Spark 2.4, Scala 2.12:
+# hudi-spark2.4-bundle_2.12
"-Dscala-2.12 -Dspark2.4 -pl packaging/hudi-spark-bundle -am"
-"-Dscala-2.12 -Dspark3.3 -pl packaging/hudi-spark-bundle -am"
-"-Dscala-2.12 -Dspark3.2 -pl packaging/hudi-spark-bundle -am"
-"-Dscala-2.12 -Dspark3.1 -pl packaging/hudi-spark-bundle -am"
-
-# spark bundles (legacy) (not overwriting previous uploads as these jar names are unique)
+# For Spark 3.2, Scala 2.12:
+# hudi-spark3.2.x_2.12
+# hudi-spark3.2plus-common
+# hudi-spark3.2-bundle_2.12
+"-Dscala-2.12 -Dspark3.2 -pl hudi-spark-datasource/hudi-spark3.2.x,hudi-spark-datasource/hudi-spark3.2plus-common,packaging/hudi-spark-bundle -am"
+# For Spark 3.1, Scala 2.12:
+# All other modules and bundles using avro 1.8
+# hudi-spark-common_2.12
+# hudi-spark_2.12
+# hudi-spark3_2.12
+# hudi-utilities_2.12
+# hudi-spark3.1-bundle_2.12
+# hudi-utilities-bundle_2.12
+# hudi-utilities-slim-bundle_2.12
+# all other non versioned bundles like hudi-common, hudi-examples, etc.
+"-Dscala-2.12 -Dspark3.1"
+# For Spark 3.3, Scala 2.12:
+# hudi-spark3.3.x_2.12
+# hudi-spark3.3-bundle_2.12
+"-Dscala-2.12 -Dspark3.3 -pl hudi-spark-datasource/hudi-spark3.3.x,packaging/hudi-spark-bundle -am"
+
+# Upload legacy Spark bundles (not overwriting previous uploads as these jar names are unique)
"-Dscala-2.11 -Dspark2 -pl packaging/hudi-spark-bundle -am" # for legacy bundle name hudi-spark-bundle_2.11
"-Dscala-2.12 -Dspark2 -pl packaging/hudi-spark-bundle -am" # for legacy bundle name hudi-spark-bundle_2.12
"-Dscala-2.12 -Dspark3 -pl packaging/hudi-spark-bundle -am" # for legacy bundle name hudi-spark3-bundle_2.12
-# utilities bundles (legacy) (overwriting previous uploads)
-"-Dscala-2.11 -Dspark2.4 -pl packaging/hudi-utilities-bundle -am" # hudi-utilities-bundle_2.11 is for spark 2.4 only
-"-Dscala-2.12 -Dspark3.1 -pl packaging/hudi-utilities-bundle -am" # hudi-utilities-bundle_2.12 is for spark 3.1 only
-
-# utilities slim bundles
-"-Dscala-2.11 -Dspark2.4 -pl packaging/hudi-utilities-slim-bundle -am" # hudi-utilities-slim-bundle_2.11
-"-Dscala-2.12 -Dspark3.1 -pl packaging/hudi-utilities-slim-bundle -am" # hudi-utilities-slim-bundle_2.12
-
-# flink bundles (overwriting previous uploads)
+# Upload Flink bundles (overwriting previous uploads)
"-Dscala-2.12 -Dflink1.13 -Davro.version=1.10.0 -pl packaging/hudi-flink-bundle -am"
"-Dscala-2.12 -Dflink1.14 -Davro.version=1.10.0 -pl packaging/hudi-flink-bundle -am"
"-Dscala-2.12 -Dflink1.15 -Davro.version=1.10.0 -pl packaging/hudi-flink-bundle -am"
+"-Dscala-2.12 -Dflink1.16 -Davro.version=1.10.0 -pl packaging/hudi-flink-bundle -am"
)
printf -v joined "'%s'\n" "${ALL_VERSION_OPTS[@]}"
@@ -102,24 +111,14 @@ elif [ "$#" == "1" ]; then
fi
COMMON_OPTIONS="-DdeployArtifacts=true -DskipTests -DretryFailedDeploymentCount=10"
-
for v in "${ALL_VERSION_OPTS[@]}"
do
# TODO: consider cleaning all modules by listing directories instead of specifying profile
- if [[ "$v" == *"$BUNDLE_MODULES_EXCLUDED" ]]; then
- # When deploying jars with bundle exclusions, we still need to build the bundles,
- # by removing "-pl -packaging/hudi-aws-bundle...", otherwise the build fails.
- v1=${v%${BUNDLE_MODULES_EXCLUDED}}
- echo "Cleaning everything before any deployment"
- $MVN clean $COMMON_OPTIONS ${v1%-pl }
- echo "Building with options ${v1%-pl }"
- $MVN install $COMMON_OPTIONS ${v1%-pl }
- else
- echo "Cleaning everything before any deployment"
- $MVN clean $COMMON_OPTIONS ${v}
- echo "Building with options ${v}"
- $MVN install $COMMON_OPTIONS ${v}
- fi
+ echo "Cleaning everything before any deployment"
+ $MVN clean $COMMON_OPTIONS ${v}
+ echo "Building with options ${v}"
+ $MVN install $COMMON_OPTIONS ${v}
+
echo "Deploying to repository.apache.org with version options ${v%-am}"
# remove `-am` option to only deploy intended modules
$MVN deploy $COMMON_OPTIONS ${v%-am}
diff --git a/scripts/release/validate_staged_bundles.sh b/scripts/release/validate_staged_bundles.sh
index 9df615c8141..8f74168cb35 100755
--- a/scripts/release/validate_staged_bundles.sh
+++ b/scripts/release/validate_staged_bundles.sh
@@ -28,41 +28,29 @@ VERSION=$2
STAGING_REPO="https://repository.apache.org/content/repositories/${REPO}/org/apache/hudi"
-declare -a BUNDLE_URLS=(
-"${STAGING_REPO}/hudi-aws-bundle/${VERSION}/hudi-aws-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-datahub-sync-bundle/${VERSION}/hudi-datahub-sync-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-flink1.13-bundle/${VERSION}/hudi-flink1.13-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-flink1.14-bundle/${VERSION}/hudi-flink1.14-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-flink1.15-bundle/${VERSION}/hudi-flink1.15-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-gcp-bundle/${VERSION}/hudi-gcp-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-hadoop-mr-bundle/${VERSION}/hudi-hadoop-mr-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-hive-sync-bundle/${VERSION}/hudi-hive-sync-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-integ-test-bundle/${VERSION}/hudi-integ-test-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-kafka-connect-bundle/${VERSION}/hudi-kafka-connect-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-metaserver-server-bundle/${VERSION}/hudi-metaserver-server-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-presto-bundle/${VERSION}/hudi-presto-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark-bundle_2.11/${VERSION}/hudi-spark-bundle_2.11-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark-bundle_2.12/${VERSION}/hudi-spark-bundle_2.12-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark2.4-bundle_2.11/${VERSION}/hudi-spark2.4-bundle_2.11-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark2.4-bundle_2.12/${VERSION}/hudi-spark2.4-bundle_2.12-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark3-bundle_2.12/${VERSION}/hudi-spark3-bundle_2.12-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark3.1-bundle_2.12/${VERSION}/hudi-spark3.1-bundle_2.12-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark3.2-bundle_2.12/${VERSION}/hudi-spark3.2-bundle_2.12-${VERSION}.jar"
-"${STAGING_REPO}/hudi-spark3.3-bundle_2.12/${VERSION}/hudi-spark3.3-bundle_2.12-${VERSION}.jar"
-"${STAGING_REPO}/hudi-timeline-server-bundle/${VERSION}/hudi-timeline-server-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-trino-bundle/${VERSION}/hudi-trino-bundle-${VERSION}.jar"
-"${STAGING_REPO}/hudi-utilities-bundle_2.11/${VERSION}/hudi-utilities-bundle_2.11-${VERSION}.jar"
-"${STAGING_REPO}/hudi-utilities-bundle_2.12/${VERSION}/hudi-utilities-bundle_2.12-${VERSION}.jar"
-"${STAGING_REPO}/hudi-utilities-slim-bundle_2.11/${VERSION}/hudi-utilities-slim-bundle_2.11-${VERSION}.jar"
-"${STAGING_REPO}/hudi-utilities-slim-bundle_2.12/${VERSION}/hudi-utilities-slim-bundle_2.12-${VERSION}.jar"
-)
+declare -a extensions=("-javadoc.jar" "-javadoc.jar.asc" "-javadoc.jar.md5" "-javadoc.jar.sha1" "-sources.jar"
+"-sources.jar.asc" "-sources.jar.md5" "-sources.jar.sha1" ".jar" ".jar.asc" ".jar.md5" ".jar.sha1" ".pom" ".pom.asc"
+".pom.md5" ".pom.sha1")
+
+declare -a bundles=("hudi-aws-bundle" "hudi-datahub-sync-bundle" "hudi-flink1.13-bundle" "hudi-flink1.14-bundle"
+"hudi-flink1.15-bundle" "hudi-gcp-bundle" "hudi-hadoop-mr-bundle" "hudi-hive-sync-bundle" "hudi-integ-test-bundle"
+"hudi-kafka-connect-bundle" "hudi-presto-bundle" "hudi-spark-bundle_2.11" "hudi-spark-bundle_2.12"
+"hudi-spark2.4-bundle_2.11" "hudi-spark2.4-bundle_2.12" "hudi-spark3-bundle_2.12" "hudi-spark3.1-bundle_2.12"
+"hudi-spark3.2-bundle_2.12" "hudi-spark3.3-bundle_2.12" "hudi-timeline-server-bundle" "hudi-trino-bundle"
+"hudi-utilities-bundle_2.11" "hudi-utilities-bundle_2.12" "hudi-utilities-slim-bundle_2.11"
+"hudi-utilities-slim-bundle_2.12")
NOW=$(date +%s)
TMP_DIR_FOR_BUNDLES=/tmp/${NOW}
mkdir "$TMP_DIR_FOR_BUNDLES"
-for url in "${BUNDLE_URLS[@]}"; do
- echo "downloading $url"
- wget "$url" -P "$TMP_DIR_FOR_BUNDLES"
+
+for bundle in "${bundles[@]}"
+do
+ for extension in "${extensions[@]}"
+ do
+ echo "downloading ${STAGING_REPO}/$bundle/${VERSION}/$bundle-${VERSION}$extension"
+ wget "${STAGING_REPO}/$bundle/${VERSION}/$bundle-${VERSION}$extension" -P "$TMP_DIR_FOR_BUNDLES"
+ done
done
-ls -l "$TMP_DIR_FOR_BUNDLES"
+ls -l "$TMP_DIR_FOR_BUNDLES/"