This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 914eb16f2b0 [HUDI-7085] Update release scripts (#10072)
914eb16f2b0 is described below

commit 914eb16f2b0d607affb861d56752694042bc2ebf
Author: Y Ethan Guo <[email protected]>
AuthorDate: Sun Nov 12 19:51:29 2023 -0800

    [HUDI-7085] Update release scripts (#10072)
---
 scripts/release/deploy_staging_jars.sh     | 18 +++++++++++++-----
 scripts/release/validate_staged_bundles.sh | 15 ++++++++++-----
 scripts/release/validate_staged_release.sh |  2 +-
 3 files changed, 24 insertions(+), 11 deletions(-)

diff --git a/scripts/release/deploy_staging_jars.sh 
b/scripts/release/deploy_staging_jars.sh
index 8829c7d2516..4e54d30a2a9 100755
--- a/scripts/release/deploy_staging_jars.sh
+++ b/scripts/release/deploy_staging_jars.sh
@@ -51,19 +51,27 @@ declare -a ALL_VERSION_OPTS=(
 # For Spark 2.4, Scala 2.12:
 # hudi-spark2.4-bundle_2.12
 "-Dscala-2.12 -Dspark2.4 -pl packaging/hudi-spark-bundle -am"
+# For Spark 3.0, Scala 2.12:
+# hudi-spark3.0.x_2.12
+# hudi-spark3.0-bundle_2.12
+"-Dscala-2.12 -Dspark3.0 -pl 
hudi-spark-datasource/hudi-spark3.0.x,packaging/hudi-spark-bundle -am"
 # For Spark 3.2, Scala 2.12:
 # hudi-spark3.2.x_2.12
 # hudi-spark3.2plus-common
 # hudi-spark3.2-bundle_2.12
 "-Dscala-2.12 -Dspark3.2 -pl 
hudi-spark-datasource/hudi-spark3.2.x,hudi-spark-datasource/hudi-spark3.2plus-common,packaging/hudi-spark-bundle
 -am"
-# For Spark 3.1, Scala 2.12:
-# All other modules and bundles using avro 1.8
-"-Dscala-2.12 -Dspark3.1"
 # For Spark 3.3, Scala 2.12:
 # hudi-spark3.3.x_2.12
+# hudi-spark3.3-bundle_2.12
+"-Dscala-2.12 -Dspark3.3 -pl 
hudi-spark-datasource/hudi-spark3.3.x,packaging/hudi-spark-bundle -am"
+# For Spark 3.4, Scala 2.12:
+# hudi-spark3.4.x_2.12
 # hudi-cli-bundle_2.12
-# hudi-spark3.3-bundle_2.12
-"-Dscala-2.12 -Dspark3.3 -pl 
hudi-spark-datasource/hudi-spark3.3.x,packaging/hudi-spark-bundle,packaging/hudi-cli-bundle
 -am"
+# hudi-spark3.4-bundle_2.12
+"-Dscala-2.12 -Dspark3.4 -pl 
hudi-spark-datasource/hudi-spark3.4.x,packaging/hudi-spark-bundle,packaging/hudi-cli-bundle
 -am"
+# For Spark 3.1, Scala 2.12:
+# All other modules and bundles using avro 1.8
+"-Dscala-2.12 -Dspark3.1"
 
 # Upload legacy Spark bundles (not overwriting previous uploads as these jar 
names are unique)
 "-Dscala-2.11 -Dspark2 -pl packaging/hudi-spark-bundle -am" # for legacy 
bundle name hudi-spark-bundle_2.11
diff --git a/scripts/release/validate_staged_bundles.sh 
b/scripts/release/validate_staged_bundles.sh
index baa0e6c3ffe..866b8cee335 100755
--- a/scripts/release/validate_staged_bundles.sh
+++ b/scripts/release/validate_staged_bundles.sh
@@ -35,8 +35,8 @@ declare -a extensions=("-javadoc.jar" "-javadoc.jar.asc" 
"-javadoc.jar.md5" "-ja
 declare -a bundles=("hudi-aws-bundle" "hudi-cli-bundle_2.11" 
"hudi-cli-bundle_2.12" "hudi-datahub-sync-bundle" "hudi-flink1.13-bundle" 
"hudi-flink1.14-bundle"
 "hudi-flink1.15-bundle" "hudi-flink1.16-bundle" "hudi-flink1.17-bundle" 
"hudi-flink1.18-bundle" "hudi-gcp-bundle" "hudi-hadoop-mr-bundle" 
"hudi-hive-sync-bundle" "hudi-integ-test-bundle"
 "hudi-kafka-connect-bundle" "hudi-metaserver-server-bundle" 
"hudi-presto-bundle" "hudi-spark-bundle_2.11" "hudi-spark-bundle_2.12"
-"hudi-spark2.4-bundle_2.11" "hudi-spark2.4-bundle_2.12" 
"hudi-spark3-bundle_2.12" "hudi-spark3.1-bundle_2.12"
-"hudi-spark3.2-bundle_2.12" "hudi-spark3.3-bundle_2.12" 
"hudi-timeline-server-bundle" "hudi-trino-bundle"
+"hudi-spark2.4-bundle_2.11" "hudi-spark2.4-bundle_2.12" 
"hudi-spark3-bundle_2.12" "hudi-spark3.0-bundle_2.12" 
"hudi-spark3.1-bundle_2.12"
+"hudi-spark3.2-bundle_2.12" "hudi-spark3.3-bundle_2.12" 
"hudi-spark3.4-bundle_2.12" "hudi-timeline-server-bundle" "hudi-trino-bundle"
 "hudi-utilities-bundle_2.11" "hudi-utilities-bundle_2.12" 
"hudi-utilities-slim-bundle_2.11"
 "hudi-utilities-slim-bundle_2.12")
 
@@ -48,9 +48,14 @@ for bundle in "${bundles[@]}"
 do
    for extension in "${extensions[@]}"
    do
-       echo "downloading 
${STAGING_REPO}/$bundle/${VERSION}/$bundle-${VERSION}$extension"
-       wget "${STAGING_REPO}/$bundle/${VERSION}/$bundle-${VERSION}$extension" 
-P "$TMP_DIR_FOR_BUNDLES"
+       url=${STAGING_REPO}/$bundle/${VERSION}/$bundle-${VERSION}$extension
+       if curl --output "$TMP_DIR_FOR_BUNDLES/$bundle-${VERSION}$extension" 
--head --fail "$url"; then
+         echo "Artifact exists: $url"
+       else
+         echo "Artifact missing: $url"
+         exit 1
+       fi
    done
 done
 
-ls -l "$TMP_DIR_FOR_BUNDLES/"
+echo "All artifacts exist. Validation succeeds."
diff --git a/scripts/release/validate_staged_release.sh 
b/scripts/release/validate_staged_release.sh
index 01c3e265b8c..7229378463a 100755
--- a/scripts/release/validate_staged_release.sh
+++ b/scripts/release/validate_staged_release.sh
@@ -40,7 +40,7 @@ if [[ $# -lt 1 ]]; then
 else
     for param in "$@"
     do
-       if [[ $param =~ --release\=([0-9]\.[0-9]*\.[0-9]) ]]; then
+       if [[ $param =~ --release\=([0-9]\.[0-9]*\.[0-9].*) ]]; then
                RELEASE_VERSION=${BASH_REMATCH[1]}
        fi
        if [[ $param =~ --rc_num\=([0-9]*) ]]; then

Reply via email to