This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hadoop-release-support.git
commit 97d7d4ae73ef692956ffea17248b810bcb076f58
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Mon Mar 24 15:22:47 2025 +0000

    HADOOP-19483. Lots of work on the thirdparty release process as part of 1.4.0 release
---
 README.md                                   |  23 +---
 build.xml                                   | 132 +++++++++++++++++++--
 doc/thirdparty.md                           |  46 +++++--
 .../3p-release-1.4.0.properties}            |  28 ++---
 src/releases/release-info-3.4.1.properties  |   1 -
 src/text/3p.vote.txt                        |   7 +-
 6 files changed, 176 insertions(+), 61 deletions(-)

diff --git a/README.md b/README.md
index d99c13f..37cbbc1 100644
--- a/README.md
+++ b/README.md
@@ -496,7 +496,7 @@ http.source=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-${hadoop.versio
 
 set `check.native.binaries` to false to skip native binary checks on platforms without them
 
-### Download the RC files from the http server
+### Download the Staged RC files from the Apache http servers
 
 Downloads under `downloads/incoming`
 ```bash
@@ -504,7 +504,7 @@ ant release.fetch.http
 ```
 
-### verify gpg signatures
+### Verify GPG signatures
 
 ```bash
 ant gpg.keys gpg.verify
 
@@ -522,7 +522,7 @@ If you don't yet trust the key of whoever signed the release then
 2. Perform whatever key verification you can and sign the key that
    level -ideally push up the signature to the servers.
 
-### untar source and build.
+### Untar source and build.
 
 This puts the built artifacts into the local maven repo so
 do not do this while building/testing downstream projects
@@ -729,23 +729,6 @@ Spark itself does not include any integration tests of the object store connecto
 This independent module tests the s3a, gcs and abfs connectors, and associated
 committers, through the spark RDD and SQL APIs.
 
-
-## Build and test HBase HBoss filesystem
-
-*Hadoop 3.4.0 notes: the changes to test under v2 SDK aren't merged in; expect failure.*
-
-[hbase-filesystem](https://github.com/apache/hbase-filesystem.git)
-
-Adds zookeeper-based locking on those filesystem API calls for which
-atomic access is required.
-
-Integration tests will go through S3A connector.
-
-```bash
-ant hboss.build
-```
-
-
 ## Parquet build and test
 
 To clean build Apache Parquet:
diff --git a/build.xml b/build.xml
index e5edf86..24078f9 100644
--- a/build.xml
+++ b/build.xml
@@ -240,27 +240,47 @@
   </presetdef>
 
   <presetdef name="gpgv">
-    <gpg dir="${release.dir}">
-    </gpg>
+    <gpg dir="${release.dir}" />
   </presetdef>
 
   <!-- verify a file in the release dir;
       automatically adds the .asc suffix -->
-  <macrodef name="gpgverify" >
-    <attribute name="name" />
+
+  <macrodef name="gpg-verify-file" >
+    <attribute name="dir" />
+    <attribute name="name" />
     <sequential>
-      <echo>Verifying GPG signature of ${release.dir}/@{name}</echo>
-      <gpg dir="${release.dir}">
+      <echo>Verifying GPG signature of @{dir}/@{name}</echo>
+      <gpg dir="@{dir}">
         <arg value="--verify"/>
         <arg value="@{name}.asc"/>
       </gpg>
     </sequential>
   </macrodef>
 
+  <!-- verify a file in the release dir;
+      automatically adds the .asc suffix -->
+  <macrodef name="gpgverify" >
+    <attribute name="name" />
+    <sequential>
+      <gpg-verify-file
+        dir="${release.dir}"
+        name="@{name}" />
+    </sequential>
+  </macrodef>
+
+  <presetdef name="git">
+    <x executable="git"/>
+  </presetdef>
+
   <presetdef name="svn">
     <x executable="svn"/>
   </presetdef>
 
+  <presetdef name="rm">
+    <delete quiet="true"/>
+  </presetdef>
+
   <!-- require a dir to exist.
   -->
   <macrodef name="require-dir">
@@ -324,7 +344,7 @@
     <echo>
       deleting ${hadoop.artifacts}/**/${hadoop.version}/*
     </echo>
-    <delete>
+    <delete quiet="true">
       <fileset dir="${hadoop.artifacts}"
         includes="**/${hadoop.version}/*"/>
     </delete>
@@ -1598,11 +1618,19 @@ ${arm.asc}
     <set name="3p.rc.name" value="${3p.version}-${3p.rc}"/>
     <set name="3p.rc.dirname" value="${3p.release}-${3p.rc}"/>
 
+    <echo>3rd party release ${3p.release} JIRA ${3p.jira.id} commit ${3p.git.commit.id} </echo>
+
+    <!-- paths for downloading -->
+    <setpath name="3p.dist.dir" location="${downloads.dir}/hadoop-thirdparty/dist"/>
+    <setpath name="3p.incoming.dir" location="${downloads.dir}/hadoop-thirdparty/incoming"/>
+
     <!-- the actual staging/release paths are in the same svn repo as hadoop main -->
     <set name="3p.svn.staging.url" value="${svn.apache.dist}/repos/dist/dev/hadoop/${3p.rc.dirname}"/>
     <set name="3p.svn.production.url" value="${svn.apache.dist}/repos/dist/release/hadoop/thirdparty/${3p.release}"/>
+
+    <!-- where the artifacts end up-->
     <setpath name="3p.staged.artifacts.dir" location="${staging.dir}/${3p.rc.dirname}"/>
 
     <set name="3p.tag.name" value="release-${3p.rc.name}"/>
@@ -1623,11 +1651,12 @@ ${arm.asc}
     <echo>
       deleting all hadoop-thirdparty artifacts
+      ${hadoop.artifacts}/thirdparty/**/${3p.version}/*
     </echo>
-    <delete>
-      <fileset dir="${hadoop.artifacts}/hadoop-thirdparty"
-        includes="**/{3p.version}/*"/>
-    </delete>
+    <rm>
+      <fileset dir="${hadoop.artifacts}/thirdparty"
+        includes="**/${3p.version}/*"/>
+    </rm>
 
   </target>
@@ -1636,8 +1665,11 @@ ${arm.asc}
     depends="3p.init">
     <require p="3p.git.commit.id"/>
     <echo>
+
+      cd ${3p.local.repo.path}
+
       # command to tag the commit
-      git tag -s ${3p.tag.name} -m "Release candidate ${3p.rc.name}" ${3p.git.commit.id}
+      git tag -s ${3p.tag.name} -m "Release candidate ${3p.rc.name}"
 
       # how to verify it
       git tag -v ${3p.tag.name}
@@ -1660,6 +1692,27 @@ ${arm.asc}
     </echo>
   </target>
 
+
+  <target name="3p.git-tag-source"
+    description="tag the HEAD of thirdparty source with the current RC version"
+    depends="3p.init">
+    <require p="3p.local.repo.path" />
+    <require-dir path="${3p.local.repo.path}" />
+    <git dir="${3p.local.repo.path}">
+      <arg value="tag" />
+      <arg value="-s" />
+      <arg value="${3p.tag.name}" />
+      <arg value="-m" />
+      <arg value="Release candidate ${3p.rc.name}" />
+    </git>
+    <git dir="${3p.local.repo.path}">
+      <arg value="tag" />
+      <arg value="-v" />
+      <arg value="${3p.tag.name}" />
+    </git>
+
+  </target>
+
   <target name="3p.vote-message"
     depends="3p.init"
     description="build the vote message">
@@ -1690,6 +1743,59 @@ Message is in file ${3p.message.out}
 
   </target>
 
+  <!--
+    From a local build of the third party artifacts,
+    or a downloaded set of remote artifacts
+    (the hadoop-thirdparty/target/ dir)
+
+    Move them to the staging area
+  -->
+
+  <target name="3p.stage"
+    description="move artifacts of the local build to the staging area"
+    depends="3p.init">
+    <require p="3p.local.target.dir" />
+    <setpath name="3p.local.artifacts.dir"
+      location="${3p.local.target.dir}/artifacts" />
+    <require-dir path="${3p.local.artifacts.dir}" />
+
+    <delete dir="${3p.staged.artifacts.dir}"/>
+    <echo>
+      copying dir "${3p.local.artifacts.dir}"
+      to "${3p.staged.artifacts.dir}"
+    </echo>
+    <move
+      file="${3p.local.artifacts.dir}"
+      tofile="${3p.staged.artifacts.dir}"/>
+  </target>
+
+  <!--
+    After the local artifacts are in the staging dir,
+    add them to svn and then commit
+  -->
+  <target name="3p.stage-to-svn"
+    description="stage the RC into svn"
+    depends="staging-init, 3p.init">
+    <require-dir path="${3p.staged.artifacts.dir}" />
+
+    <svn dir="${staging.dir}">
+      <arg value="update" />
+    </svn>
+    <svn dir="${staging.dir}">
+      <arg value="add" />
+      <arg value="${3p.staged.artifacts.dir}" />
+    </svn>
+    <echo>Committing with message ${staging.commit.msg}. Please wait</echo>
+    <svn dir="${staging.dir}">
+      <arg value="commit" />
+      <arg value="-m" />
+      <arg value="${3p.staging.commit.msg}" />
+    </svn>
+  </target>
+
+  <!--
+    thirdparty rollback; svn work
+  -->
   <target name="3p.stage-svn-rollback"
     description="rollback a thirdparty version staged to RC"
     depends="3p.init">
@@ -1709,7 +1815,6 @@ Message is in file ${3p.message.out}
     </svn>
   </target>
-
   <target name="3p.stage-move-to-production"
     description="promote the staged the thirdparty RC into dist"
     depends="3p.init">
@@ -1739,4 +1844,5 @@ Message is in file ${3p.message.out}
     </svn>
   </target>
 
+
 </project>
diff --git a/doc/thirdparty.md b/doc/thirdparty.md
index 15b600b..1a9916f 100644
--- a/doc/thirdparty.md
+++ b/doc/thirdparty.md
@@ -57,19 +57,49 @@ targets in the build to validate the third party release
 
 All targets are prefixed `3p.`
 
-| target                 | function                                              |
-|------------------------|-------------------------------------------------------|
-| `3p.mvn-purge`         | remove all third party artifacts from the local repo  |
-| `3p.vote-message`      | generate a vote message in target/3p.vote.txt         |
-| `3p.print-tag-command` | Print all the tag commands for a release              |
-
 ```
+ 3p.git-tag-source            tag the HEAD of thirdparty source with the current RC version
  3p.mvn-purge                 purge all local hadoop-thirdparty
- 3p.print-tag-command         print the git command to tag the rc
+ 3p.stage                     move artifacts of the local build to the staging area
+ 3p.stage-to-svn              stage the RC into svn
+ 3p.vote-message              build the vote message
  3p.stage-move-to-production  promote the staged the thirdparty RC into dist
  3p.stage-svn-rollback        rollback a thirdparty version staged to RC
- 3p.vote-message              build the vote message
+ 3p.print-tag-command         print the git command to tag the rc
 ```
 
 Third party artifacts must be staged to the same svn repository as
 for staging full hadoop releases, as set in `staging.dir`
+
+
+### Download the Staged RC files from the Apache http servers
+
+Downloads under `downloads/incoming`
+```bash
+ant 3p.release.fetch
+```
+
+
+### Verify GPG signatures
+
+```bash
+ant gpg.keys 3p.gpg.verify
+```
+This will import all the KEYS from
+[https://downloads.apache.org/hadoop/common/KEYS](https://downloads.apache.org/hadoop/common/KEYS),
+then verify the signature of each downloaded file.
+
+If you don't yet trust the key of whoever signed the release then
+1. Refresh the keys from the OpenPGP server, to see
+   if they've been signed by others.
+
+       gpg --refresh-keys
+
+2. Perform whatever key verification you can and sign the key that
+   level -ideally push up the signature to the servers.
+
+## Cancelling an RC
+
+```bash
+ant 3p.stage-svn-rollback
+```
diff --git a/src/releases/release-info-3.4.1.properties b/src/releases/3p/3p-release-1.4.0.properties
similarity index 54%
copy from src/releases/release-info-3.4.1.properties
copy to src/releases/3p/3p-release-1.4.0.properties
index 6bd91ac..f336dea 100644
--- a/src/releases/release-info-3.4.1.properties
+++ b/src/releases/3p/3p-release-1.4.0.properties
@@ -14,20 +14,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# property file for 3.4.1
-hadoop.version=3.4.1
-rc=RC3
-previous.version=3.4.0
-release.branch=3.4.1
-git.commit.id=4d7825309348956336b8f06a08322b78422849b1
-
-jira.id=HADOOP-19087
-jira.title=Release Hadoop 3.4.1
-
-amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.4.1-RC3/
-amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.4.1-RC3
-arm.src.dir=${amd.src.dir}
-http.source=${amd.src.dir}
-asf.staging.url=https://repository.apache.org/content/repositories/orgapachehadoop-1430
-cloudstore.profile=sdk2
+# Release Info for thirdparty-1.4.0
+#3p.version=1.4.0
+3p.rc=RC1
+3p.branch=https://github.com/apache/hadoop-thirdparty/commits/release-1.4.0-RC0
+3p.git.commit.id=a1eda71bbfa630138b97ecac1a977c18d05f0003
+3p.jira.id=HADOOP-19483
+3p.nexus.staging.url=https://repository.apache.org/content/repositories/orgapachehadoop-1434
+3p.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-thirdparty-1.4.0-RC0
+3p.staging.url=https://dist.apache.org/repos/dist/dev/hadoop/thirdparty-1.4.0-${3p.rc}
+3p.tag.name=release-1.4.0-RC1
+# 3p.local.repo.path=<location of third party source repo>
+# 3p.local.target.dir=<target/ directory of your local (or scp'd over /target) dir>
diff --git a/src/releases/release-info-3.4.1.properties b/src/releases/release-info-3.4.1.properties
index 6bd91ac..2c6423e 100644
--- a/src/releases/release-info-3.4.1.properties
+++ b/src/releases/release-info-3.4.1.properties
@@ -24,7 +24,6 @@ git.commit.id=4d7825309348956336b8f06a08322b78422849b1
 jira.id=HADOOP-19087
 jira.title=Release Hadoop 3.4.1
 
-amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.4.1-RC3/
 amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.4.1-RC3
 arm.src.dir=${amd.src.dir}
 http.source=${amd.src.dir}
diff --git a/src/text/3p.vote.txt b/src/text/3p.vote.txt
index 91807c9..fa6f602 100644
--- a/src/text/3p.vote.txt
+++ b/src/text/3p.vote.txt
@@ -7,13 +7,14 @@ I have built a release candidate (${3p.rc}) for Hadoop-Thirdparty ${3p.version}.
 
 The RC is available at: ${3p.staging.url}/
 
-The git tag is ${3p.tag.name}, commit ${3p.git.commit.id}
+The git tag is ${3p.tag.name},
+and commit ${3p.git.commit.id}
 
 The maven artifacts are staged at ${3p.nexus.staging.url}
 
 Please try the release and vote. The vote will run for 5 days.
 
-Here is vote:
+Here is my vote:
 
-+1 (binding)
\ No newline at end of file
++1 (binding)
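
For anyone following the new process, the targets added in build.xml and listed in doc/thirdparty.md above chain together roughly as below. This is a sketch rather than a definitive runbook: it assumes the build has been pointed at the src/releases/3p/3p-release-1.4.0.properties file shown above (how that file gets loaded is not part of this diff), and the ordering is inferred from the target descriptions.

```bash
# Rough end-to-end flow for a hadoop-thirdparty RC, using only target names
# that appear in this commit; property wiring and step ordering are assumptions.

# Release manager: tag the source, stage the locally built artifacts, draft the vote
ant 3p.git-tag-source             # sign and verify the release tag in ${3p.local.repo.path}
ant 3p.stage                      # move ${3p.local.target.dir}/artifacts into the staging dir
ant 3p.stage-to-svn               # svn update/add/commit the staged RC
ant 3p.vote-message               # build the vote message (template: src/text/3p.vote.txt)

# Reviewers: fetch the staged RC and check the signatures
ant 3p.release.fetch              # downloads land under downloads/incoming
ant gpg.keys 3p.gpg.verify        # import the Hadoop KEYS file, verify each .asc

# Afterwards
ant 3p.stage-move-to-production   # vote passed: promote the staged RC into dist
ant 3p.stage-svn-rollback         # RC cancelled: roll the staged RC back out of svn
```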
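
If tagging by hand rather than through the new `git` presetdef, the commands are the ones `3p.print-tag-command` now echoes; below they are written out with the concrete tag name from 3p-release-1.4.0.properties substituted, and a placeholder path standing in for `3p.local.repo.path`.

```bash
# Manual equivalent of 3p.git-tag-source, following the commands echoed by
# 3p.print-tag-command; the checkout path is a placeholder for 3p.local.repo.path.
cd /path/to/hadoop-thirdparty

# command to tag the commit (HEAD of the RC branch)
git tag -s release-1.4.0-RC1 -m "Release candidate 1.4.0-RC1"

# how to verify it
git tag -v release-1.4.0-RC1
```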