This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hadoop-release-support.git

commit 6f73d846cd44b03c4d7ed834e88dc529561d6d6b
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Wed Aug 3 19:21:00 2022 +0100

    more docs, more libraries to build, targets to invoke the tests
---
 README.md | 164 +++++++++++++++++++++++++++++++++++++++++++++++++-------------
 build.xml |  89 +++++++++++++++++++++++++++++++---
 2 files changed, 211 insertions(+), 42 deletions(-)

diff --git a/README.md b/README.md
index dd353d2..098f226 100644
--- a/README.md
+++ b/README.md
@@ -7,9 +7,7 @@ validating gpg signatures, creating release messages and other things.
 
 # ant builds
 
-Look in the build.xml for details, including working with other modules
-
-
+See below.
 
 
 
@@ -43,36 +41,7 @@ To purge all artifacts of the chosen hadoop version from your local maven repository
 ant purge
 ```
 
-# download and build someone else's release candidate
-
-In build properties, declare `hadoop.version`, `rc` and `http.source`
-
-```properties
-hadoop version=2.10.2
-rc=0
-http.source=https://home.apache.org/~iwasakims/hadoop-2.10.2-RC0/
-```
-
-targets of relevance
-
-| target             | action                     |
-|--------------------|----------------------------|
-| release.fetch.http | fetch artifacts            |
-| release.dir.check  | verify release dir exists  |
-| release.src.untar  | untar retrieved artifacts  |
-| release.src.build  | build the source           |
-| release.src.test   | build and test the source  |
-| gpg.keys           | import the hadoop KEYS     |
-| gpg.verify         | verify the D/L'd artifacts |
-|                    |                            |
-|                    |                            |
-|                    |                            |
-|                    |                            |
-|                    |                            |
-
-set `release.native.binaries` to false to skip native binary checks on platforms without them
-
-## workflow for preparing an RC
+# workflow for preparing an RC
 
 Build the RC using the docker process on whichever host is set to do it
 
@@ -135,6 +104,9 @@ ant gpg.keys gpg.verify
 When committed to svn it will be uploaded and accessible via an
 https://svn.apache.org URL.
 
+Stage the RC artifacts by moving them into the svn-managed staging directory:
+
+```bash
+ant stage
+```
 When the RC is released, an `svn move` operation can promote it
 directly.
 
@@ -156,6 +128,13 @@ This isn't automated as it needs to be done in the source tree.
 ant print-tag-command
 ```
 
+### Prepare for the email
+
+1. Go to https://repository.apache.org/#stagingRepositories
+2. Find the hadoop repo for the RC
+3. "close" it and wait for that to go through
+
+
 ### Generate the RC vote email
 
 Review/update template message in `src/email.txt`.
@@ -169,4 +148,121 @@ The message is printed and saved to the file `target/email.txt`
 
 *do not send it until you have validated the URLs resolve*
 
-## Notes
+Now wait for the votes to come in. This is a good time to
+repeat all the testing of downstream projects, this time
+validating the staged artifacts rather than anything built
+locally.
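+
+For example, one possible sequence, assuming the relevant projects
+are checked out and configured (see below); adjust to what you have:
+
+```bash
+# example sequence; adjust to the projects you have checked out
+ant purge-from-maven
+ant cloudstore.build spark.build gcs.build hboss.build
+```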
+
+# How to download and build someone else's release candidate
+
+In build properties, declare `hadoop.version`, `rc` and `http.source`
+
+```properties
+hadoop.version=3.3.4
+rc=1
+http.source=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.3.4-RC1/
+```
+
+targets of relevance
+
+| target             | action                     |
+|--------------------|----------------------------|
+| release.fetch.http | fetch artifacts            |
+| release.dir.check  | verify release dir exists  |
+| release.src.untar  | untar retrieved artifacts  |
+| release.src.build  | build the source           |
+| release.src.test   | build and test the source  |
+| gpg.keys           | import the hadoop KEYS     |
+| gpg.verify         | verify the D/L'd artifacts |
+
+
+set `release.native.binaries` to false to skip native binary checks on platforms without them.
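+
+For example, in `build.properties` (illustrative):
+
+```properties
+# illustrative: skip checks of the native binaries
+release.native.binaries=false
+```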
+
+### Download the RC files from the http server
+
+```bash
+ant release.fetch.http
+```
+
+### Untar and build
+
+This puts the built artifacts into the local maven repo, so
+do not do this while building/testing downstream projects,
+*and call `ant purge-from-maven` afterwards.*
+
+```bash
+ant release.src.untar release.src.build
+```
+
+
+# Building and testing projects from the staged maven artifacts
+
+A lot of the targets build maven projects from the staged maven artifacts.
+
+For this to work:
+1. Check out the relevant projects somewhere.
+2. Set their locations in the `build.properties` file (see the sketch below).
+3. Make sure that the branch checked out is the one you want to build.
+   This matters for anyone who works on those other projects
+   on their own branches.
+4. Some projects need java11.
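+
+For example, a sketch of the relevant `build.properties` entries
+(the paths are illustrative; only set the ones you need):
+
+```properties
+# illustrative paths; only declare the projects you have checked out
+cloudstore.dir=/home/user/work/cloudstore
+bigdata-interop.dir=/home/user/work/bigdata-interop
+hboss.dir=/home/user/work/hbase-filesystem
+fs-api-shim.dir=/home/user/work/fs-api-shim
+```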
+
+First, purge your local maven repository of hadoop artifacts:
+
+```bash
+ant purge-from-maven
+```
+
+## Cloudstore
+
+No tests, sorry.
+
+```
+ant cloudstore.build
+```
+
+## Google GCS
+
+This is java11 only.
+
+Ideally, you should run the tests, or even better, run them before the RC is up for review.
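+
+A minimal sketch for selecting a java11 runtime before invoking ant
+(the JDK path is illustrative):
+
+```bash
+# illustrative JDK path; point JAVA_HOME at your own java11 install
+export JAVA_HOME=/usr/lib/jvm/java-11-openjdk
+export PATH="$JAVA_HOME/bin:$PATH"
+```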
+
+
+Building the libraries.
+Do this only if you aren't running the tests.
+
+```
+ant gcs.build
+```
+
+Testing the libraries:
+```
+ant gcs.test
+```
+
+
+
+## Apache Spark
+
+Validates hadoop client artifacts; the cloud tests cover hadoop cloud storage clients.
+
+
+```bash
+ant spark.build
+```
+
+Then follow up with the cloud examples, if you are set up to run them:
+```bash
+ant cloud-examples.build
+ant cloud-examples.test
+```
+
+## HBase filesystem
+
+
+```bash
+ant hboss.build
+```
+
diff --git a/build.xml b/build.xml
index 954553b..3683742 100644
--- a/build.xml
+++ b/build.xml
@@ -53,6 +53,11 @@
 
   <!-- for spark builds -->
   <property name="spark.version" value="3.4.0-SNAPSHOT"/>
+  <!--  spark excludes the AWS SDK pulled in by hadoop-aws and forces in its own;
+        this fixes it to be in sync with hadoop 3.3.4 RC1.
+        see https://issues.apache.org/jira/browse/SPARK-39969
+   -->
+  <property name="spark.aws.version" value="1.12.262"/>
 
 
   <property name="release" value="hadoop-${hadoop.version}"/>
@@ -61,8 +66,8 @@
   <property name="staged.artifacts.dir" location="${staging.dir}/${rc.name}"/>
 
   <property name="tag.name" value="release-${rc.name}"/>
-  <property name="nexus.staging.url"
-    value=""/>
+<!--  <property name="nexus.staging.url"
+    value=""/>-->
   <property name="release.untar.dir" location="${target}/untar"/>
   <property name="release.source.dir" location="${release.untar.dir}/source"/>
   <property name="release.bin.dir" location="${release.untar.dir}/bin"/>
@@ -122,6 +127,7 @@
 
       Fetching and validating artifacts in ${release.dir}
       release.dir=${release.dir}
+      nexus.staging.url=${nexus.staging.url}
 
       scp.source=${scp.source}
       http.source=${http.source}
@@ -148,6 +154,10 @@
     <delete dir="${target}"/>
   </target>
 
+  <target name="ant">
+    <echo>duplicate "ant" on the command line; nothing to do</echo>
+  </target>
+
   <target name="purge-from-maven" depends="init"
     description="purge all artifacts from the maven repo">
     <property name="mvn.repo"
@@ -265,10 +275,11 @@
   </target>
 
   <target name="stage" depends="init"
-    description="copy the RC to the svn staging dir">
+    description="move the RC to the svn staging dir">
 
     <fail message="unset: staging.dir" unless="staging.dir"/>
 
+    <echo>moving to ${staging.dir}</echo>
     <move
       file="${release.dir}"
       todir="${staging.dir}"/>
@@ -276,6 +287,10 @@
       <arg value="-l"/>
       <arg value="${staging.dir}"/>
     </x>
+    <echo>
+      Now go to the staging dir and svn add/commit the new RC directory.
+    </echo>
 
   </target>
 
@@ -337,6 +352,7 @@ Message is in file ${message.out}
       <arg value="-DskipTests"/>
       <arg value="-Dmaven.javadoc.skip=true"/>
       <arg value="-Dhadoop.version=${hadoop.version}"/>
+      <arg value="-Daws.java.sdk.version=${spark.aws.version}"/>
       <arg value="clean"/>
       <arg value="install"/>
     </mvn>
@@ -386,6 +402,7 @@ Message is in file ${message.out}
      https://github.com/GoogleCloudPlatform/bigdata-interop
     </echo>
     <mvn dir="${bigdata-interop.dir}">
+      <arg value="-T 1C"/>
       <arg value="-Psnapshots-and-staging"/>
       <arg value="-DskipTests"/>
       <arg value="-Dhadoop.version=${hadoop.version}"/>
@@ -395,6 +412,23 @@ Message is in file ${message.out}
     </mvn>
   </target>
 
+
+  <target name="gi" if="bigdata-interop.dir"
+    depends="init"
+    description="Build and test the google gcs artifacts">
+    <echo>
+      Test the google gcs artifacts. Requires GCS credentials.
+    </echo>
+    <mvn dir="${bigdata-interop.dir}">
+      <arg value="-T 1C"/>
+      <arg value="-Psnapshots-and-staging"/>
+      <arg value="-Dhadoop.version=${hadoop.version}"/>
+      <arg value="clean"/>
+      <arg value="package"/>
+      <arg value="install"/>
+    </mvn>
+  </target>
+
   <target name="hboss.build" if="hboss.dir"
     depends="init"
     description="Build the hboss artifacts">
@@ -415,6 +449,25 @@ Message is in file ${message.out}
       <arg value="install"/>
     </mvn>
   </target>
+  <target name="hboss.test" if="hboss.dir"
+    depends="init"
+    description="Build and test the hboss artifacts">
+    <echo>
+      Build and test the HBase HBoss module.
+      Its tests are brittle to s3a internal changes, just because
+      it needs to plug in its own s3 client.
+
+      asf-staging is a profile in stevel's ~/.m2/settings.xml to
+      use the asf staging repo.
+    </echo>
+    <mvn dir="${hboss.dir}">
+      <arg value="-Pasf-staging"/>
+      <arg value="-Dhadoop.version=${hadoop.version}"/>
+      <arg value="-Dhadoop33.version=${hadoop.version}"/>
+      <arg value="clean"/>
+      <arg value="install"/>
+    </mvn>
+  </target>
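+  <!--
+    A sketch of what the asf-staging profile referenced above might look
+    like in ~/.m2/settings.xml (illustrative; the URL is the usual ASF
+    staging group):
+
+    <profile>
+      <id>asf-staging</id>
+      <repositories>
+        <repository>
+          <id>asf-staging</id>
+          <url>https://repository.apache.org/content/groups/staging/</url>
+        </repository>
+      </repositories>
+    </profile>
+  -->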
 
 
   <target name="cloudstore.build" if="cloudstore.dir"
@@ -435,6 +488,23 @@ Message is in file ${message.out}
   </target>
 
 
+  <target name="fsapi.test" if="fs-api-shim.dir"
+    depends="init"
+    description="Build and test fs-api-shim">
+    <echo>
+      Build the fs api shim module.
+      This MUST build against hadoop-3.2.0; only the hadoop test version
+      is set to the RC version here.
+    </echo>
+    <mvn dir="${fs-api-shim.dir}">
+      <arg value="-Psnapshots-and-staging"/>
+      <arg value="-Dhadoop.test.version=${hadoop.version}"/>
+      <arg value="clean"/>
+      <arg value="test"/>
+    </mvn>
+  </target>
+
+
   <!--  Fetch the artifacts from an http repo, for validating someone else's release.
    the download is into incoming.dir, then after a cleanup copied into release.dir; -->
   <target name="release.fetch.http" depends="init"
@@ -463,6 +533,8 @@ Message is in file ${message.out}
 
   <target name="release.src.untar" depends="release.dir.check"
     description="untar the release source">
+    <echo>untarring source ${release.dir}/${release}-src.tar.gz</echo>
+    <mkdir dir="target/untar"/>
 
     <gunzip src="${release.dir}/${release}-src.tar.gz" dest="target/untar"/>
     <untar src="target/untar/${release}-src.tar" dest="${release.source.dir}" />
@@ -486,12 +558,13 @@ Message is in file ${message.out}
   </target>
 
   <target name="release.bin.untar" depends="release.dir.check"
-    description="untar the release">
+    description="untar the binary release">
 
-    <gunzip src="${release.dir}/${release}.tar.gz" dest="target/untar"/>
+    <mkdir dir="target/bin-untar" />
+    <gunzip src="${release.dir}/${release}.tar.gz" dest="target/bin-untar"/>
 
     <!--  use the native command to preserve properties -->
-    <x executable="tar" dir="target/untar" >
+    <x executable="tar" dir="target/bin-untar" >
       <arg value="-xf" />
       <arg value="${release}.tar" />
     </x>
@@ -508,14 +581,14 @@ Message is in file ${message.out}
     <presetdef name="hadoop">
       <exec failonerror="true"
         executable="bin/hadoop"
-        dir="target/untar/${release}" />
+        dir="target/bin-untar/${release}" />
     </presetdef>
 
     <!--    quiet hadoop-->
     <presetdef name="hadoopq">
       <exec failonerror="false"
         executable="bin/hadoop"
-        dir="target/untar/${release}" />
+        dir="target/bin-untar/${release}" />
     </presetdef>
     <echo>ls</echo>
     <hadoop>


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org
