This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hadoop-release-support.git

commit dad4cbecb1de3bfb58c8f82810a870ed3be2de18
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Thu Feb 22 18:12:33 2024 +0000

    Improving artifact validation
---
 NOTICE                                     |  1 -
 README.md                                  | 55 +++++++++++++++++++++----
 build.xml                                  | 66 +++++++++++++++++++++++++++---
 pom.xml                                    |  2 +
 src/releases/release-info-3.4.1.properties | 33 +++++++++++++++
 5 files changed, 142 insertions(+), 15 deletions(-)

diff --git a/NOTICE b/NOTICE
index b25576f..f35f47c 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,4 +1,3 @@
-lApache Template
 Copyright 2022 The Apache Software Foundation
 
 This product includes software developed at
diff --git a/README.md b/README.md
index a38992e..cb4fac6 100644
--- a/README.md
+++ b/README.md
@@ -16,6 +16,8 @@
 
 This project helps create validate hadoop release candidates
 
+https://github.com/apache/hadoop-release-support
+
 It has an Apache Ant `build.xml` file to help with preparing the release,
 validating gpg signatures, creating release messages and other things.
 
@@ -27,11 +29,16 @@ the classpath.
 
 Installed applications/platforms
 
-Java 8+. Later releases are valid for validation too.
+* Java 8+. Later releases are valid for validation too.
+* Apache Ant.
+* Apache maven
+* gpg
+* git
+* subversion (for staging artifacts; not needed for validation)
 
-Apache Ant.
+### Ant setup
 
-To use the scp/ssh commands we n
+To use the scp/ssh commands we need the jsch jar on the classpath.
 ```
 ant -diagnostics
 ```
@@ -55,6 +62,7 @@ apt-get install maven
 apt-get install subversion
 ```
 
+
 # Files
 
 ###  `/build.xml`
@@ -142,6 +150,15 @@ point to the newly created file.
 release.info.file=src/releases/release-info-X.Y.Z.properties
 ```
 
+#### Switching to a new release on the command line
+
+You can switch to a new release on the command line; this is needed when
+validating PRs.
+
+ ```bash
+ant -Drelease.info.file=src/releases/release-info-3.4.1.properties
+ ```
+
 ### set up `build.properties`
 
 ```properties
@@ -175,7 +192,7 @@ And then purge all artifacts of that release from maven.
 This is critical when validating downstream project builds.
 
 ```bash
-ant purge-from-maven
+ant mvn-purge
 ```
 
 ### SCP RC down to `target/incoming`
@@ -374,7 +391,7 @@ now is the time to use the keytool to declare that you trust them
 
 This puts the built artifacts into the local maven repo so
 do not do this while building/testing downstream projects
-*and call `ant purge-from-maven` after*
+*and call `ant mvn-purge` after*
 
 ```bash
 ant release.src.untar release.src.build
@@ -431,7 +448,7 @@ First, purge your maven repository of all hadoop- JAR files of the
 pending release version
 
 ```bash
-ant purge-from-maven
+ant mvn-purge
 ```
 
 ## execute the maven test.
@@ -642,7 +659,7 @@ For safety, purge your maven repo of all versions of the release, so
 as to guarantee that everything comes from the production store.
 
 ```bash
-ant purge-from-maven
+ant mvn-purge
 ```
 # tips
 
@@ -732,7 +749,7 @@ ant print-tag-command
 Remove downloaded files and maven artifactgs
 
 ```bash
-ant clean purge-from-maven
+ant clean mvn-purge
 ```
 
 
@@ -745,3 +762,25 @@ ant stage-svn-rollback
 # and get the log
 ant stage-svn-log
 ```
+
+# Contributing to this module
+
+There are lots of opportunities to contribute to the module
+* New ant targets for more stages of the process, including automating more release steps
+* Extending the maven module dependencies
+* Adding more artifacts to the forbidden list
+* Adding more validation tests to the maven test suites
+* Adding more commands to execute against a distribution
+* Adding github actions to help validate the module itself.
+
+During the release phase of a Hadoop release: whatever is needed
+to ship!
+
+This repo works on Commit-then-Review; that is: no need to wait for
+review by others before committing.
+This is critical for rapid evolution during the release process.
+Just expect to be required to justify changes after the fact.
+
+* Contributions by non-committers should be submitted as github PRs.
+* Contributions by committers MAY be just done as commits to the main branch.
+* The repo currently supports forced push to the main branch. We may need to block this
diff --git a/build.xml b/build.xml
index d93499b..45fc01e 100644
--- a/build.xml
+++ b/build.xml
@@ -92,7 +92,8 @@
   <setpath name="incoming.dir" location="${downloads.dir}/incoming"/>
   <setpath name="src" location="src"/>
   <setpath name="target" location="target"/>
-
+  <!-- env. properties -->
+  <property environment="env"/>
   <!--
   Load the user/installation specific properties.
   As these fix the properties for the test of the build,
@@ -282,8 +283,14 @@
     <delete dir="${downloads.dir}"/>
   </target>
 
-  <target name="purge-from-maven" depends="init"
-    description="purge all artifacts from the maven repo">
+  <!-- ========================================================= -->
+  <!--  -->
+  <!-- ========================================================= -->
+
+
+  <!-- Do this to ensure that all builds are from the staging repo artifacts -->
+  <target name="mvn-purge" depends="init"
+    description="purge all artifacts from the local maven repository">
     <property name="mvn.repo"
       location="${user.home}/.m2/repository"/>
     <property name="hadoop.artifacts"
@@ -300,23 +307,66 @@
   </target>
 
   <target name="mvn-test" depends="init"
-    description="build and test the maven module, use the build.properties hadoop version">
-
+    description="build and test the maven module">
     <mvn>
       <arg value="test"/>
       <arg value="-Pstaging"/>
       <arg value="-Dhadoop.version=${hadoop.version}"/>
       <arg value="-U"/>
     </mvn>
-    <mvn>
+
+  </target>
+
+  <target name="mvn-dependencies" depends="init"
+    description="build the dependency tree of the maven artifacts">
+
+    <!-- execute mvn dependency:tree saving the output to a file -->
+    <setpath name="mvndeps.out" location="${target}/mvndeps.txt"/>
+    <mvn output="${mvndeps.out}">
       <arg value="dependency:tree"/>
       <arg value="-Pstaging"/>
       <arg value="-Dverbose"/>
       <arg value="-Dhadoop.version=${hadoop.version}"/>
     </mvn>
+    <!-- load and print the output -->
+    <loadfile property="mvndeps" srcFile="${mvndeps.out}"/>
+    <echo>${mvndeps}</echo>
+  </target>
+
+  <target name="mvn-validate-dependencies"
+    depends="mvn-dependencies"
+    description="make sure no forbidden artifacts are found">
+
+    <!--
+     fails if the artifact string prefixed by : is found
+     it's possible to block explicit versions here
+     -->
+    <macrodef name="forbidden">
+      <attribute name="artifact"/>
+      <sequential>
+        <fail message="forbidden artifact found: @{artifact}">
+          <condition>
+            <contains string="${mvndeps}" substring=":@{artifact}"/>
+          </condition>
+        </fail>
+      </sequential>
+    </macrodef>
+    <echo>Dependencies are listed in ${mvndeps.out}</echo>
+    <forbidden artifact="protobuf-java:jar:2.5.0"/>
+    <forbidden artifact="slf4j-log4j12"/>
+    <forbidden artifact="log4j12"/>
+    <!-- not probed for as maven pulls them in from zk, somehow -->
+<!--    <forbidden artifact="logback-classic"/>-->
+<!--    <forbidden artifact="logback-core"/>-->
   </target>
 
 
+  <!-- ========================================================= -->
+  <!-- When building on remote systems (EC2 etc) this pulls down -->
+  <!-- the artifacts for the next stages -->
+  <!-- ========================================================= -->
+
+
   <target name="scp-artifacts" depends="init"
     description="scp the artifacts from a remote host. may be slow">
     <fail unless="scp.hostname"/>
@@ -360,6 +410,10 @@
 
   </target>
 
+  <!-- ========================================================= -->
+  <!-- GPG  -->
+  <!-- ========================================================= -->
+
 
   <target name="gpg.keys" depends="init"
     description="fetch and import GPG keys">
diff --git a/pom.xml b/pom.xml
index e42e556..c9f697d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -36,6 +36,7 @@
 
   <dependencies>
 
+<!--
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -48,6 +49,7 @@
       <artifactId>hadoop-client-runtime</artifactId>
       <version>${hadoop.version}</version>
     </dependency>
+-->
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
diff --git a/src/releases/release-info-3.4.1.properties b/src/releases/release-info-3.4.1.properties
new file mode 100644
index 0000000..a2f48d9
--- /dev/null
+++ b/src/releases/release-info-3.4.1.properties
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# property file for 3.4.1
+hadoop.version=3.4.1-SNAPSHOT
+rc=RC0
+previous.version=3.4.1
+release.branch=3.4
+git.commit.id=branch-3.4
+
+jira.id=HADOOP-XXXXXX
+jira.title=Release 3.4.1
+
+amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.4.0-RC2
+arm.src.dir=${amd.src.dir}
+http.source=${amd.src.dir}
+asf.staging.url=https://repository.apache.org/content/repositories/orgapachehadoop-1402
+
+cloudstore.profile=sdk2
+


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to