This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hadoop-release-support.git

commit 667099808f7f399cda40dcbb9d22f6beeb97a677
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Thu Jul 21 20:06:04 2022 +0100

    3.3.4 RC0 changes
---
 README.md     | 105 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++--
 build.xml     |  47 ++++++++++++++++----------
 pom.xml       |   6 ++--
 src/email.txt |  25 +++-----------
 4 files changed, 139 insertions(+), 44 deletions(-)

diff --git a/README.md b/README.md
index 0affbf1..dd353d2 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
-# Validate Hadoop Client Artifacts
-
-This project imports the hadoop client artifacts to verify that they are (a) 
published on the maven repository and (b) contain the classes we expect.
+# Validate Hadoop Release Artifacts
+
+This project helps validate hadoop release candidates
 
 It has an ant `build.xml` file to help with preparing the release,
 validating gpg signatures, creating release messages and other things.
@@ -71,3 +71,102 @@ targets of relevance
 |                    |                            |
 
 set `release.native.binaries` to false to skip native binary checks on 
platforms without them
+
+## workflow for preparing an RC
+
+Build the RC using the docker process on whichever host is set to do it
+
+### set up build.properties
+
+```properties
+scp.hostname=stevel-ubuntu
+scp.user=stevel
+scp.hadoop.dir=hadoop
+staging.dir=/Users/stevel/hadoop/release/staging
+spark.dir=/Users/stevel/Projects/sparkwork/spark
+cloud-examples.dir=/Users/stevel/Projects/sparkwork/cloud-integration/cloud-examples
+cloud.test.configuration.file=/Users/stevel/Projects/config/cloud-test-configs/s3a.xml
+bigdata-interop.dir=/Users/stevel/Projects/gcs/bigdata-interop
+hboss.dir=/Users/stevel/Projects/hbasework/hbase-filesystem
+cloudstore.dir=/Users/stevel/Projects/cloudstore
+hadoop.version=3.3.4
+rc=0
+```
+
+### Clean up first
+
+
+```bash
+ant clean
+```
+
+And then purge all artifacts of that release from maven.
+This is critical when validating downstream project builds.
+
+```bash
+ant purge-from-maven
+```
+
+### Download RC to `target/incoming`
+
+This will take a while! Look in target/incoming for progress.
+
+```bash
+ant scp-artifacts
+```
+
+
+### Move to the release dir
+
+
+```bash
+ant move-scp-artifacts release.dir.check
+```
+
+### verify gpg signing
+
+
+```bash
+ant gpg.keys gpg.verify
+```
+
+### copy to a staging location in the hadoop SVN repository.
+
+When committed to svn it will be uploaded and accessible via an
+https://svn.apache.org URL.
+
+When the RC is released, an `svn move` operation can promote it
+directly.
+
+### In the staging svn repo, update, add and commit the work
+
+This is not part of the tool. Can take a while... exit any VPN for extra speed.
+
+```bash
+svn update
+svn add <RC directory name>
+svn commit 
+```
+
+### tag the rc and push to github
+
+This isn't automated as it needs to be done in the source tree.
+
+```bash
+ant print-tag-command
+```
+
+### Generate the RC vote email
+
+Review/update template message in `src/email.txt`.
+All ant properties referenced will be expanded if set.
+
+```bash
+ant vote-message
+```
+
+The message is printed and saved to the file `target/email.txt`
+
+*do not send it until you have validated the URLs resolve*
+
+## Notes
diff --git a/build.xml b/build.xml
index 2f0854e..954553b 100644
--- a/build.xml
+++ b/build.xml
@@ -16,9 +16,9 @@
   ~ limitations under the License.
   -->
 
-<project name="download" default="dist" basedir=".">
+<project name="release-validator" default="init" basedir=".">
   <description>
-    build file to manage validation of artifacts.
+    build file to manage release and validation of artifacts.
     Maven is one of the targets here.
 
     hadoop version is set in the property hadoop.version
@@ -29,10 +29,6 @@
 
     for building other modules to work, this ant build must be on java11
 
-    set -gx JAVA_HOME $JAVA11_HOME; and echo $JAVA_HOME
-
-    or in fish
-
     For validating artifacts put up as an RC, use the http-artifacts target
     to retrieve, with http.source set to the url, e.g
     http.source=https://home.apache.org/~iwasakims/hadoop-2.10.2-RC0/
@@ -45,14 +41,14 @@
   <property file="build.properties"/>
 
 
-  <property name="rc" value="RC1"/>
+  <property name="rc" value="RC0"/>
 
   <property name="dist.dir" location="${target}/dist"/>
   <property name="incoming.dir" location="${target}/incoming"/>
 
 
-  <!--  base name of a release, 3.3.3-RC0 -->
-  <property name="hadoop.version" value="3.3.3"/>
+  <!--  base name of a release, 3.3.4-RC0 -->
+  <property name="hadoop.version" value="3.3.4"/>
   <property name="rc.name" value="${hadoop.version}-${rc}"/>
 
   <!-- for spark builds -->
@@ -60,7 +56,8 @@
 
 
   <property name="release" value="hadoop-${hadoop.version}"/>
-  <property name="release.dir" location="${target}/${release}-${rc}"/>
+  <property name="rc-dirname" value="${release}-${rc}"/>
+  <property name="release.dir" location="${target}/${rc-dirname}"/>
   <property name="staged.artifacts.dir" location="${staging.dir}/${rc.name}"/>
 
   <property name="tag.name" value="release-${rc.name}"/>
@@ -121,6 +118,7 @@
     <echo>
       hadoop.version=${hadoop.version}
       rc=${rc}
+      git.commit.id=${git.commit.id}
 
       Fetching and validating artifacts in ${release.dir}
       release.dir=${release.dir}
@@ -150,7 +148,7 @@
     <delete dir="${target}"/>
   </target>
 
-  <target name="purge" depends="init"
+  <target name="purge-from-maven" depends="init"
     description="purge all artifacts from the maven repo">
     <property name="mvn.repo"
       location="${user.home}/.m2/repository"/>
@@ -187,6 +185,7 @@
 
     <delete dir="${incoming.dir}"/>
     <mkdir dir="${incoming.dir}"/>
+    <echo>Downloading to ${incoming.dir}; may take a while</echo>
     <!-- scp -r $srv:hadoop/target/artifacts ~/Projects/Releases
     -->
     <x executable="scp">
@@ -204,12 +203,12 @@
     <move
       file="${incoming.dir}/artifacts"
       tofile="${release.dir}"/>
+    <echo>Moved scp downloaded artifacts to ${release.dir}</echo>
   </target>
 
   <target name="release.dir.check" depends="init">
     <verify-release-dir />
 
-
     <x executable="ls">
       <arg value="-l"/>
       <arg value="${release.dir}"/>
@@ -268,7 +267,7 @@
   <target name="stage" depends="init"
     description="copy the RC to the svn staging dir">
 
-    <fail message="unset: ${staging.dir}"/>
+    <fail message="unset: staging.dir" unless="staging.dir"/>
 
     <move
       file="${release.dir}"
@@ -281,11 +280,13 @@
   </target>
 
   <target name="print-tag-command"
-    description="print the git command to tag the rc">
+    description="print the git command to tag the rc"
+    depends="init">
+    <require p="git.commit.id"/>
     <echo>
       command to tag the commit is
 
-      git tag -s ${tag.name} -m "Release candidate -${rc.name}"
+      git tag -s ${tag.name} -m "Release candidate ${rc.name}" ${git.commit.id}
       git push apache ${tag.name}
     </echo>
   </target>
@@ -295,6 +296,13 @@
     depends="init"
     description="build the vote message">
 
+    <fail unless="git.commit.id">
+      Set the git commit number in git.commit.id
+    </fail>
+
+    <fail unless="nexus.staging.url">
+      Set the nexus staging repository URL in nexus.staging.url
+    </fail>
 
     <loadfile property="message.txt"
       srcFile="src/email.txt">
@@ -307,6 +315,11 @@
 
     <echo>${message.txt}</echo>
     <echo file="${message.out}">${message.txt}</echo>
+    <echo>
+----------
+Message is in file ${message.out}
+    </echo>
+
   </target>
 
   <target name="spark.build" if="spark.dir"
@@ -431,7 +444,7 @@
 
     <delete dir="${incoming.dir}"/>
     <mkdir dir="${incoming.dir}"/>
-    <!-- list and then wget the immediate children  into the incoming dir -->
+    <!-- list and then wget the immediate children into the incoming dir -->
     <x executable="wget" dir="${incoming.dir}" >
       <arg value="--no-parent"/>
       <arg value="--recursive"/>
@@ -449,7 +462,7 @@
   </target>
 
   <target name="release.src.untar" depends="release.dir.check"
-    description="untar the release">
+    description="untar the release source">
 
     <gunzip src="${release.dir}/${release}-src.tar.gz" dest="target/untar"/>
     <untar src="target/untar/${release}-src.tar" dest="${release.source.dir}" 
/>
diff --git a/pom.xml b/pom.xml
index 444ba98..e65f3f0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -36,7 +36,7 @@
     <maven-antrun-plugin.version>1.7</maven-antrun-plugin.version>
 
 
-    <hadoop.version>3.3.3</hadoop.version>
+    <hadoop.version>3.3.4</hadoop.version>
 
     <!-- SLF4J/LOG4J version -->
     <slf4j.version>1.7.36</slf4j.version>
@@ -151,9 +151,9 @@
       </repositories>
     </profile>
     <profile>
-      <id>hadoop-3.3.3</id>
+      <id>hadoop-3.3.4</id>
       <properties>
-        <hadoop.version>3.3.3</hadoop.version>
+        <hadoop.version>3.3.4</hadoop.version>
       </properties>
     </profile>
 
diff --git a/src/email.txt b/src/email.txt
index 7ac06e8..c6bf912 100644
--- a/src/email.txt
+++ b/src/email.txt
@@ -3,9 +3,9 @@
 I have put together a release candidate (${rc}) for Hadoop ${hadoop.version}
 
 The RC is available at:
-https://dist.apache.org/repos/dist/dev/hadoop/${rc.name}/
+https://dist.apache.org/repos/dist/dev/hadoop/${rc-dirname}/
 
-The git tag is ${tag.name}, commit d37586cbda3
+The git tag is ${tag.name}, commit ${git.commit.id}
 
 The maven artifacts are staged at
 ${nexus.staging.url}
@@ -14,31 +14,14 @@ You can find my public key at:
 https://dist.apache.org/repos/dist/release/hadoop/common/KEYS
 
 Change log
-https://dist.apache.org/repos/dist/dev/hadoop/${rc.name}/CHANGELOG.md
+https://dist.apache.org/repos/dist/dev/hadoop/${rc-dirname}/CHANGELOG.md
 
 Release notes
-https://dist.apache.org/repos/dist/dev/hadoop/${rc.name}/RELEASENOTES.md
+https://dist.apache.org/repos/dist/dev/hadoop/${rc-dirname}/RELEASENOTES.md
 
 There's a very small number of changes, primarily critical code/packaging
 issues and security fixes.
 
-* The critical fixes which shipped in the 3.2.3 release.
-* CVEs in our code and dependencies
-* Shaded client packaging issues.
-* A switch from log4j to reload4j
-
-reload4j is an active fork of the log4j 1.17 library with the classes
-which contain CVEs removed. Even though hadoop never used those classes,
-they regularly raised alerts on security scans and concern from users.
-Switching to the forked project allows us to ship a secure logging
-framework. It will complicate the builds of downstream
-maven/ivy/gradle projects which exclude our log4j artifacts, as they
-need to cut the new dependency instead/as well.
-
 See the release notes for details.
 
-This is the second release attempt. It is the same git commit as before, but
-fully recompiled with another republish to maven staging, which has been
-verified by building spark, as well as a minimal test project.
-
 Please try the release and vote. The vote will run for 5 days.


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to