This is an automated email from the ASF dual-hosted git repository. gurwls223 pushed a commit to branch branch-3.5 in repository https://gitbox.apache.org/repos/asf/spark.git
commit ce1bae90ec9420ed22111c1abe23a09804f2d2a2 Author: Hyukjin Kwon <gurwls223@apache.org> AuthorDate: Sun Jun 8 17:16:44 2025 -0700 Automatically send the RC vote email --- dev/create-release/release-build.sh | 91 ++++++++++++++++++++++++++++++++++ dev/create-release/release-util.sh | 1 + dev/create-release/spark-rm/Dockerfile | 2 +- 3 files changed, 93 insertions(+), 1 deletion(-) diff --git a/dev/create-release/release-build.sh b/dev/create-release/release-build.sh index ca8739ba6aa7..898397b7ac7a 100755 --- a/dev/create-release/release-build.sh +++ b/dev/create-release/release-build.sh @@ -538,6 +538,97 @@ if [[ "$1" == "publish-release" ]]; then -H "Content-Type:application/xml" -v \ $NEXUS_ROOT/profiles/$NEXUS_PROFILE/finish) echo "Closed Nexus staging repository: $staged_repo_id" + + echo "Sending the RC vote email" + EMAIL_TO="dev@spark.apache.org" + EMAIL_SUBJECT="[VOTE] Release Spark ${SPARK_VERSION} (RC${SPARK_RC_COUNT})" + + # Calculate deadline in Pacific Time (PST/PDT) + DEADLINE=$(TZ=America/Los_Angeles date -d "+4 days" "+%a, %d %b %Y %H:%M:%S %Z") + + JIRA_API_URL="https://issues.apache.org/jira/rest/api/2/project/SPARK/versions" + JIRA_VERSION_ID=$(curl -s "$JIRA_API_URL" | \ + # Split JSON objects by replacing '},{' with a newline-separated pattern + tr '}' '\n' | \ + # Find the block containing the exact version name + grep -F "\"name\":\"$SPARK_VERSION\"" -A 5 | \ + # Extract the line with "id" + grep '"id"' | \ + # Extract the numeric id value (assuming "id":"123456") + sed -E 's/.*"id":"?([0-9]+)"?.*/\1/' | \ + head -1) + + # Configure msmtp + cat > ~/.msmtprc <<EOF +defaults +auth on +tls on +tls_trust_file /etc/ssl/certs/ca-certificates.crt +logfile ~/.msmtp.log + +account apache +host mail-relay.apache.org +port 587 +from $ASF_USERNAME@apache.org +user $ASF_USERNAME +password $ASF_PASSWORD + +account default : apache +EOF + + chmod 600 ~/.msmtprc + + # Compose and send the email + { + echo "From: $ASF_USERNAME@apache.org" + echo "To: 
$EMAIL_TO" + echo "Subject: $EMAIL_SUBJECT" + echo + echo "Please vote on releasing the following candidate as Apache Spark version ${SPARK_VERSION}." + echo + echo "The vote is open until ${DEADLINE} and passes if a majority +1 PMC votes are cast, with" + echo "a minimum of 3 +1 votes." + echo + echo "[ ] +1 Release this package as Apache Spark ${SPARK_VERSION}" + echo "[ ] -1 Do not release this package because ..." + echo + echo "To learn more about Apache Spark, please see https://spark.apache.org/" + echo + echo "The tag to be voted on is ${GIT_REF} (commit ${git_hash}):" + echo "https://github.com/apache/spark/tree/${GIT_REF}" + echo + echo "The release files, including signatures, digests, etc. can be found at:" + echo "https://dist.apache.org/repos/dist/dev/spark/${GIT_REF}-bin/" + echo + echo "Signatures used for Spark RCs can be found in this file:" + echo "https://dist.apache.org/repos/dist/dev/spark/KEYS" + echo + echo "The staging repository for this release can be found at:" + echo "https://repository.apache.org/content/repositories/${staged_repo_id}/" + echo + echo "The documentation corresponding to this release can be found at:" + echo "https://dist.apache.org/repos/dist/dev/spark/${GIT_REF}-docs/" + echo + echo "The list of bug fixes going into ${SPARK_VERSION} can be found at the following URL:" + echo "https://issues.apache.org/jira/projects/SPARK/versions/${JIRA_VERSION_ID}" + echo + echo "FAQ" + echo + echo "=========================" + echo "How can I help test this release?" + echo "=========================" + echo + echo "If you are a Spark user, you can help us test this release by taking" + echo "an existing Spark workload and running on this release candidate, then" + echo "reporting any regressions." 
+ echo + echo "If you're working in PySpark you can set up a virtual env and install" + echo "the current RC via \"pip install https://dist.apache.org/repos/dist/dev/spark/${GIT_REF}-bin/pyspark-${SPARK_VERSION}.tar.gz\"" + echo "and see if anything important breaks." + echo "In the Java/Scala, you can add the staging repository to your project's resolvers and test" + echo "with the RC (make sure to clean up the artifact cache before/after so" + echo "you don't end up building with an out of date RC going forward)." + } | msmtp -t fi popd diff --git a/dev/create-release/release-util.sh b/dev/create-release/release-util.sh index 687ccab7e1ca..acdecadeee33 100755 --- a/dev/create-release/release-util.sh +++ b/dev/create-release/release-util.sh @@ -141,6 +141,7 @@ function get_release_info { if [ -n "$SPARK_RC_COUNT" ]; then RC_COUNT=$SPARK_RC_COUNT fi + export SPARK_RC_COUNT=$RC_COUNT # Check if the RC already exists, and if re-creating the RC, skip tag creation. RELEASE_TAG="v${RELEASE_VERSION}-rc${RC_COUNT}" diff --git a/dev/create-release/spark-rm/Dockerfile b/dev/create-release/spark-rm/Dockerfile index 789915d018de..4891fd0e328f 100644 --- a/dev/create-release/spark-rm/Dockerfile +++ b/dev/create-release/spark-rm/Dockerfile @@ -77,7 +77,7 @@ RUN apt-get clean && apt-get update && $APT_INSTALL gnupg ca-certificates && \ # Install R packages and dependencies used when building. # R depends on pandoc*, libssl (which are installed above). 
# Note that PySpark doc generation also needs pandoc due to nbsphinx - $APT_INSTALL r-base r-base-dev && \ + $APT_INSTALL r-base r-base-dev msmtp && \ $APT_INSTALL libcurl4-openssl-dev libgit2-dev libssl-dev libxml2-dev && \ $APT_INSTALL texlive-latex-base texlive texlive-fonts-extra texinfo qpdf texlive-latex-extra && \ $APT_INSTALL libfontconfig1-dev libharfbuzz-dev libfribidi-dev libfreetype6-dev libpng-dev libtiff5-dev libjpeg-dev && \ --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org For additional commands, e-mail: commits-help@spark.apache.org