This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hadoop-release-support.git

commit dd4a2e9e6b5c51ccc0ed310abf713af8b256d626
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Wed May 11 18:27:36 2022 +0100

    generating email message, readme, ...
---
 README.md     |  18 +++----
 build.xml     | 147 ++++++++++++++++++++++++++++++++++++++++++++++++++++------
 pom.xml       |  82 ++++++--------------------------
 src/email.txt |  44 ++++++++++++++++++
 4 files changed, 201 insertions(+), 90 deletions(-)

diff --git a/README.md b/README.md
index a2c7295..48374ce 100644
--- a/README.md
+++ b/README.md
@@ -2,10 +2,14 @@
 
 This project imports the hadoop client artifacts to verify that they are (a) published on the maven repository and (b) contain the classes we expect.
 
-To build and test with the client API:l
+It also has an ant `build.xml` file to help with preparing the release,
+validating gpg signatures, creating release messages and other things.
+
+
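+As an illustration, a release target can be run with the version and RC
+properties overridden on the command line (the values here are examples;
+the defaults come from `release.properties`):
+
+```bash
+ant gpgv -Dhadoop.version=3.3.3 -Drc=RC1
+```
+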
+To build and test with the client API:
 
 ```bash
-mvn clean test -Pclient
+mvn clean test
 ```
 
 Compilation verifies the API is present; the
@@ -15,23 +19,19 @@ If the hadoop artifacts are in staging/snapshot repositories,
 use the `staging` profile
 
 ```bash
-mvn clean test -Pclient -Pstaging
+mvn clean test -Pstaging
 ```
 
 To force an update
 
 ```bash
-mvn clean test -Pclient -Pstaging -U
+mvn clean test -Pstaging -U
 ```
 
 To purge all artifacts of the chosen hadoop version from your local maven repository.
 
 ```bash
-mvn clean -Ppurge
+ant purge
 ```
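+
+The purge deletes `**/${hadoop.version}/*` under the hadoop group in the
+local repository; the version comes from `release.properties` and can be
+overridden for a one-off purge:
+
+```bash
+ant purge -Dhadoop.version=3.3.3
+```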
 
-*do not use with the dependency declaration of -Pclient or things get confused*
-(it will try to resolve the artifacts, even as they are deleted)
 
-There are different profiles for different versions;
-the default value is 3.3.3
diff --git a/build.xml b/build.xml
index f14cb92..cb11e66 100644
--- a/build.xml
+++ b/build.xml
@@ -38,14 +38,24 @@
   <loadproperties srcFile="release.properties" />
  <!--  utterly superfluous, but ensures that IDEA knows about the properties -->
   <property file="release.properties" />
-
-  <property name="dist" location="${target}/dist"/>
-  <property name="artifacts" location="${target}/artifacts"/>
   <property name="rc" value="RC1"/>
 
-  <!--  base name of a release-->
+  <property name="dist.dir" location="${target}/dist"/>
+  <property name="incoming.dir" location="${target}/incoming"/>
+
+
+
+  <!--  base name of a release, 3.3.3-RC0 -->
+  <property name="rc.name" value="${hadoop.version}-${rc}"/>
+
+
   <property name="release" value="hadoop-${hadoop.version}"/>
+  <property name="release.dir" location="${target}/${release}-${rc}"/>
+  <property name="staged.artifacts.dir" location="${staging.dir}/${rc.name}"/>
 
+  <property name="tag.name" value="release-${rc.name}"/>
+  <property name="nexus.staging.url"
+    value="https://repository.apache.org/content/repositories/orgapachehadoop-1349/"/>
 
 
   <target name="init">
@@ -55,17 +65,27 @@
     </presetdef>
 
     <presetdef name="mvn">
-      <x command="mvn"/>
+      <x executable="mvn"/>
+    </presetdef>
+
+    <presetdef name="gpg">
+      <x executable="gpg"/>
     </presetdef>
 
 
-    <mkdir dir="${dist}"/>
+    <mkdir dir="${target}"/>
+
+    <echo>
+      Fetching and validating artifacts in ${release.dir}
+      staging to ${staging.dir}
+      staged artifacts to ${staged.artifacts.dir}
+    </echo>
   </target>
 
   <target name="clean"
         description="clean up target/ dir">
     <!-- Delete the ${dist} directory trees -->
-    <delete dir="${dist}"/>
+    <delete dir="${target}"/>
   </target>
 
   <target name="purge" depends="init"
@@ -75,6 +95,9 @@
     <property name="hadoop.artifacts"
       location="${mvn.repo}/org/apache/hadoop"/>
 
+    <echo>
+      deleting ${hadoop.artifacts}/**/${hadoop.version}/*
+    </echo>
     <delete>
       <fileset dir="${hadoop.artifacts}"
         includes="**/${hadoop.version}/*"/>
@@ -83,32 +106,128 @@
   </target>
 
   <target name="scp-artifacts" depends="init"
-        description="scp the artifacts from a remote host, cleaning any local entries first">
+        description="scp the artifacts from a remote host. may be slow">
     <fail unless="scp.hostname" />
     <fail unless="scp.user" />
     <fail unless="scp.hadoop.dir" />
     <property name="scp.source"
       value="${scp.user}@${scp.hostname}:${scp.hadoop.dir}/target/artifacts"/>
 
-    <delete dir="${artifacts}"/>
-    <mkdir dir="${artifacts}"/>
+    <delete dir="${incoming.dir}"/>
+    <mkdir dir="${incoming.dir}"/>
 <!-- scp -r $srv:hadoop/target/artifacts ~/Projects/Releases -->
     <x executable="scp">
       <arg value="-r"/>
       <arg value="${scp.source}"/>
-      <arg value="${artifacts}"/>
+      <arg value="${incoming.dir}"/>
     </x>
+
+  </target>
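+
+  <!--
+    example invocation, with placeholder values for the required
+    scp.* properties:
+      ant scp-artifacts -Dscp.hostname=build-host -Dscp.user=alice \
+        -Dscp.hadoop.dir=hadoop
+  -->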
+
+
+  <target name="move-scp-artifacts" depends="init"
+        description="move the downloaded artifacts">
+    <delete dir="${release.dir}"/>
+    <move
+      file="${incoming.dir}/artifacts"
+      tofile="${release.dir}"/>
   </target>
 
-  <target name="gpg-verify" depends="init"
+
+  <target name="gpgv" depends="init"
         description="verify the downloaded artifacts">
+    <fail message="dir missing: ${release.dir}">
+        <condition>
+            <not>
+              <available file="${release.dir}"/>
+            </not>
+        </condition>
+    </fail>
+
+    <x executable="ls">
+      <arg value="-l"/>
+      <arg value="${release.dir}"/>
+    </x>
+
+    <presetdef name="gpgv">
+      <gpg dir="${release.dir}">
+      </gpg>
+    </presetdef>
+
+    <gpgv>
+      <arg value="--verify"/>
+      <arg value="${release}-src.tar.gz.asc"/>
+    </gpgv>
+    <gpgv>
+      <arg value="--verify"/>
+      <arg value="${release}-site.tar.gz.asc"/>
+    </gpgv>
+    <gpgv>
+      <arg value="--verify"/>
+      <arg value="${release}.tar.gz.asc"/>
+    </gpgv>
+
+    <gpgv>
+      <arg value="--verify"/>
+      <arg value="${release}-rat.txt.asc"/>
+    </gpgv>
+
+    <gpgv>
+      <arg value="--verify"/>
+      <arg value="RELEASENOTES.md.asc"/>
+    </gpgv>
+
+    <gpgv>
+      <arg value="--verify"/>
+      <arg value="CHANGELOG.md.asc"/>
+    </gpgv>
+
+
   </target>
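+  <!--
+    the same checks can be run by hand from ${release.dir}, e.g.
+      gpg --verify hadoop-3.3.3-src.tar.gz.asc
+    (version shown as an example), after importing the release
+    manager's public key from the project KEYS file.
+  -->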
 
+  <target name="stage" depends="init"
+        description="copy the RC to the svn staging dir">
+
+    <fail unless="staging.dir" message="unset: ${staging.dir}" />
+
+    <move
+      file="${release.dir}"
+      todir="${staging.dir}"/>
+    <x executable="ls">
+      <arg value="-l"/>
+      <arg value="${staging.dir}"/>
+    </x>
+
+  </target>
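+
+  <!--
+    publishing the staged RC to dist.apache.org remains a manual svn
+    step afterwards; an assumed (not verified here) sequence is:
+      cd ${staging.dir} && svn add ${rc.name} && svn commit
+  -->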
+
+  <target name="print-tag-command"
+  description="print the git command to tag the rc">
+    <echo>
+      command to tag the commit is
+
+      git tag -s ${tag.name} -m "Release candidate ${rc.name}"
+      git push apache ${tag.name}
+    </echo>
+  </target>
+
+
+  <target name="vote-message"
+    depends="init"
+    description="build the vote message">
+
 
-  <target name="dist" depends="init"
-        description="generate the distribution">
+    <loadfile property="message.txt"
+      srcFile="src/email.txt">
+        <filterchain>
+            <expandproperties/>
+        </filterchain>
+    </loadfile>
+    <property name="message.out"
+      location="${target}/email.txt"/>
 
+    <echo>${message.txt}</echo>
+    <echo file="${message.out}">${message.txt}</echo>
   </target>
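+
+  <!--
+    "ant vote-message" expands the ${...} references in src/email.txt
+    against the loaded release properties and writes the result to
+    ${target}/email.txt as well as echoing it.
+  -->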
 
 </project>
diff --git a/pom.xml b/pom.xml
index 2eb0875..444ba98 100644
--- a/pom.xml
+++ b/pom.xml
@@ -17,17 +17,13 @@
 
     To build with the client API
 
-    mvn clean package -Pclient
+    mvn clean test
 
     to force an update
 
-    mvn clean package -Pclient -Pstaging -U
+    mvn clean test -Pstaging -U
 
-    to purge all artifacts of that version from your local maven repo
-    mvn clean -Ppurge
 
-    *do not use with the dependency declaration of -Pclient or things get confused*
-    (it will try to resolve the artifacts, even as they are deleted)
   </description>
 
   <properties>
@@ -50,6 +46,19 @@
 
   <dependencies>
 
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-api</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-runtime</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
@@ -162,67 +171,6 @@
       </properties>
     </profile>
 
-    <!--    build with the client api-->
-    <profile>
-      <id>client</id>
-
-      <dependencies>
-
-
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-api</artifactId>
-          <version>${hadoop.version}</version>
-        </dependency>
-
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-runtime</artifactId>
-          <version>${hadoop.version}</version>
-        </dependency>
-
-      </dependencies>
-    </profile>
-
-
-    <profile>
-      <id>purge</id>
-
-      <build>
-        <plugins>
-
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>delete RC artifacts</id>
-                <phase>clean</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-
-                    <property name="mvn.repo"
-                      location="${user.home}/.m2/repository"/>
-                    <property name="hadoop.artifacts"
-                      location="${mvn.repo}/org/apache/hadoop"/>
-
-                    <delete>
-                      <fileset dir="${hadoop.artifacts}"
-                        includes="**/${hadoop.version}/*"/>
-                    </delete>
-
-
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
 
   </profiles>
 
diff --git a/src/email.txt b/src/email.txt
new file mode 100644
index 0000000..7ac06e8
--- /dev/null
+++ b/src/email.txt
@@ -0,0 +1,44 @@
+[VOTE] Release Apache Hadoop ${hadoop.version}
+
+I have put together a release candidate (${rc}) for Hadoop ${hadoop.version}
+
+The RC is available at:
+https://dist.apache.org/repos/dist/dev/hadoop/${rc.name}/
+
+The git tag is ${tag.name}, commit d37586cbda3
+
+The maven artifacts are staged at
+${nexus.staging.url}
+
+You can find my public key at:
+https://dist.apache.org/repos/dist/release/hadoop/common/KEYS
+
+Change log
+https://dist.apache.org/repos/dist/dev/hadoop/${rc.name}/CHANGELOG.md
+
+Release notes
+https://dist.apache.org/repos/dist/dev/hadoop/${rc.name}/RELEASENOTES.md
+
+There's a very small number of changes, primarily critical code/packaging
+issues and security fixes.
+
+* The critical fixes which shipped in the 3.2.3 release.
+* CVEs in our code and dependencies
+* Shaded client packaging issues.
+* A switch from log4j to reload4j
+
+reload4j is an active fork of the log4j 1.2.17 library with the classes
+which contain CVEs removed. Even though hadoop never used those classes,
+they regularly raised alerts on security scans and concern from users.
+Switching to the forked project allows us to ship a secure logging
+framework. It will complicate the builds of downstream
+maven/ivy/gradle projects which exclude our log4j artifacts, as they
+need to exclude the new dependency instead/as well.
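+(For example, a maven project which previously excluded the log4j:log4j
+artifact from its hadoop dependencies would now exclude
+ch.qos.reload4j:reload4j as well, or instead.)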
+
+See the release notes for details.
+
+This is the second release attempt. It is the same git commit as before, but
+fully recompiled with another republish to maven staging, which has been
+verified by building spark, as well as a minimal test project.
+
+Please try the release and vote. The vote will run for 5 days.

