This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hadoop-release-support.git

commit bba572e146695c90eae19d0b5b76b949e9a248c1
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Tue Feb 20 17:21:15 2024 +0000

    HADOOP-19018. Improving design of release build file
    
    Driven more by property files which are managed in git
---
 README.md                             |   3 +-
 build.xml                             | 304 +++++++++++++++++++++++-----------
 pom.xml                               |  41 +++--
 release.properties                    |  18 ++
 src/releases/release-3.4.0.properties |  36 ++++
 src/text/core-announcement.txt        |   6 +-
 6 files changed, 293 insertions(+), 115 deletions(-)

diff --git a/README.md b/README.md
index f6b37a3..9b4c3fd 100644
--- a/README.md
+++ b/README.md
@@ -316,7 +316,8 @@ ant cloudstore.build
 
 [Big Data Interop](https://github.com/GoogleCloudPlatform/bigdata-interop).
 
-This is java 11+ only.
+* This is java 11+ only.
+* Currently only builds against the AWS v1 SDK.
 
 Ideally, you should run the tests, or even better, run them before the RC is 
up for review.
 
diff --git a/build.xml b/build.xml
index 5fb99c1..ad6f8bd 100644
--- a/build.xml
+++ b/build.xml
@@ -16,96 +16,131 @@
   ~ limitations under the License.
   -->
 
-<project name="release-validator" default="init" basedir=".">
+<project name="hadoop-release" default="init" basedir=".">
   <description>
     build file to manage release and validation of artifacts.
-    Maven is one of the targets here.
-
+    Maven is simply one of the targets here.
+
+    It loads three property files in order:
+    * build.properties: custom deployment options. User managed.
+    * release.properties: pointer to the real release file. SCM-managed.
+    * src/releases/${release.property.file}: the actual release properties
+      which contains things like git tag, release and RC version etc.
+      SCM-managed.
+    
     hadoop version is set in the property hadoop.version
     build.properties is required to set source of RC tarball
 
-    All the complex commands are done by executing the unix commands;
+    Most of the complex commands are done by executing the unix commands;
     this build file sets them up by building the commands properly.
 
-    for building other modules to work, this ant build must be on java11
+    For building other modules, this ant build must be on java11
+    and have the maven executable in the path.
+    Note: some libraries (e.g. Avro) have even higher requirements.
+    Enjoy.
+
+    Text announcement templates are in
+    src/text
+    These are expanded with property expansion.
 
-    For validating artifacts put up as an an RC, use the http-artifacts target
-    to retrieve, with http.source set to the url, e.g
-    http.source=https://home.apache.org/~iwasakims/hadoop-2.10.2-RC0/
   </description>
-  <!-- set global properties for this build -->
+
+  <!-- set immutable properties for this build -->
   <property name="src" location="src"/>
   <property name="home" location="${user.home}"/>
   <property name="target" location="target"/>
-  <!--suppress AntResolveInspection -->
-  <property file="build.properties"/>
-
-
-
   <property name="downloads.dir" location="downloads"/>
   <property name="dist.dir" location="${downloads.dir}/dist"/>
   <property name="incoming.dir" location="${downloads.dir}/incoming"/>
+  
 
+  <!-- foundational macro to fail if a property is unset-->
+  <macrodef name="require">
+    <attribute name="p" />
+    <sequential>
+      <fail unless="@{p}" message="unset property @{p}" />
+    </sequential>
+  </macrodef>
+  
+  <!--
+  Load the user/installation specific properties.
+  As these fix the properties for the rest of the build,
+  they allow for customization.
+  -->
+  <!--suppress AntResolveInspection -->
+  <property file="build.properties"/>
+  
+  <!--
+   Load the release.properties file
+   This MUST contain an absolute/relative path to the release.properties
+   file if not set in build.properties.
+   -->
+  <loadproperties srcFile="release.properties"/>
+  <require p="release.property.file"/>
+  <!-- and load the file it references. -->
+  <loadproperties srcFile="${release.property.file}"/>
 
-  <!--  base name of a release -->
-  <property name="hadoop.version" value="3.4"/>
-  <property name="rc" value="RC0"/>
-  <property name="rc.name" value="${hadoop.version}-${rc}"/>
-
- <!--  previous version, used in annoucements -->
-  <property name="previous.ver" value="3.3.6"/>
-  <property name="release.branch" value="3.4"/>
 
+  <!--
+   Load all property files, validate the settings, build the ant
+   properties, define macros etc.
+  -->
+  
+  <target name="init" description="initialize">
 
-  <property name="git.commit.id" value="706d88266ab"/>
-  <property name="jira.id" value="HADOOP-18470"/>
+    <!--  base name of a release -->
+    <require p="hadoop.version"/>
+    <!-- release candidate number-->
+    <require p="rc"/>
+    <!-- commit id of the RC -->
+    <require p="git.commit.id"/>
+    <!-- JIRA ID of the release -->
+    <require p="jira.id"/>
 
+    <!-- branch this release is from -->
+    <require p="release.branch"/>
 
 
-  <!-- for spark builds -->
-  <property name="spark.version" value="3.5.0-SNAPSHOT"/>
-  <!--  spark excludes hadoop-aws dependency and forces in their own
-        this fixes it to be in sync with hadoop
-        see https://issues.apache.org/jira/browse/SPARK-39969
-   -->
-  <property name="spark.aws.version" value="1.12.316"/>
+    <property name="rc.name" value="${hadoop.version}-${rc}"/>
 
+    <!--  previous version, used in announcements -->
+    <require p="previous.version"/>
 
-  <property name="release" value="hadoop-${hadoop.version}"/>
-  <property name="rc.dirname" value="${release}-${rc}"/>
-  <property name="release.dir" location="${downloads.dir}/${rc.dirname}"/>
 
-  <property name="tag.name" value="release-${rc.name}"/>
-<!--  <property name="nexus.staging.url"
-    value=""/>-->
-  <property name="release.untar.dir" location="${downloads.dir}/untar"/>
-  <property name="release.source.dir" location="${release.untar.dir}/source"/>
-  <property name="release.site.dir" location="${release.untar.dir}/site"/>
-  <property name="site.dir" 
location="${release.untar.dir}/site/r${hadoop.version}"/>
-  <property name="release.bin.dir" location="${release.untar.dir}/bin"/>
-  <property name="check.native.binaries" value="true"/>
-  <property name="arm.artifact.dir" 
location="${arm.hadoop.dir}/target/artifacts/" />
-  <property name="arm.dir" location="${downloads.dir}/arm" />
-  <property name="arm.binary.src" 
location="${arm.artifact.dir}/hadoop-${hadoop.version}.tar.gz" />
-  <property name="arm.binary.prefix" value="hadoop-${hadoop.version}-aarch64" 
/>
-  <property name="arm.binary.filename" value="${arm.binary.prefix}.tar.gz" />
-  <property name="arm.binary" location="${arm.dir}/${arm.binary.filename}" />
-  <property name="arm.binary.sha512" location="${arm.binary}.sha512" />
-  <property name="arm.binary.asc" location="${arm.binary}.asc" />
+    <property name="release" value="hadoop-${hadoop.version}"/>
+    <property name="rc.dirname" value="${release}-${rc}"/>
+    <property name="release.dir" location="${downloads.dir}/${rc.dirname}"/>
 
-  <property name="staged.artifacts.dir" 
location="${staging.dir}/${rc.dirname}"/>
+    <property name="tag.name" value="release-${rc.name}"/>
+    <!--  <property name="asf.staging.url"
+        value=""/>-->
+    <property name="release.untar.dir" location="${downloads.dir}/untar"/>
+    <property name="release.source.dir" 
location="${release.untar.dir}/source"/>
+    <property name="release.site.dir" location="${release.untar.dir}/site"/>
+    <property name="site.dir" 
location="${release.untar.dir}/site/r${hadoop.version}"/>
+    <property name="release.bin.dir" location="${release.untar.dir}/bin"/>
+    <property name="check.native.binaries" value="true"/>
+    <property name="arm.artifact.dir" 
location="${arm.hadoop.dir}/target/artifacts/" />
+    <property name="arm.dir" location="${downloads.dir}/arm" />
+    <property name="arm.binary.src" 
location="${arm.artifact.dir}/hadoop-${hadoop.version}.tar.gz" />
+    <property name="arm.binary.prefix" 
value="hadoop-${hadoop.version}-aarch64" />
+    <property name="arm.binary.filename" value="${arm.binary.prefix}.tar.gz" />
+    <property name="arm.binary" location="${arm.dir}/${arm.binary.filename}" />
+    <property name="arm.binary.sha512" location="${arm.binary}.sha512" />
+    <property name="arm.binary.asc" location="${arm.binary}.asc" />
 
-  <property name="staging.commit.msg" value="${jira.id}. Hadoop ${rc.name} 
built from ${git.commit.id}" />
+    <property name="staged.artifacts.dir" 
location="${staging.dir}/${rc.dirname}"/>
 
-  <property name="svn.apache.dist" value="https://dist.apache.org/"/>
-  <property name="svn.staging.url" 
value="${svn.apache.dist}/repos/dist/dev/hadoop/${rc.dirname}"/>
-  <property name="svn.production.url" 
value="${svn.apache.dist}/repos/dist/release/hadoop/common/${release}"/>
-  <property name="production.commit.msg" value="${jira.id}. Releasing Hadoop 
${hadoop.version}" />
+    <property name="staging.commit.msg" value="${jira.id}. Hadoop ${rc.name} 
built from ${git.commit.id}" />
 
-  <property name="bin-untar.dir" location="target/bin-untar/${release}"/>
+    <property name="svn.apache.dist" value="https://dist.apache.org/"/>
+    <property name="svn.staging.url" 
value="${svn.apache.dist}/repos/dist/dev/hadoop/${rc.dirname}"/>
+    <property name="svn.production.url" 
value="${svn.apache.dist}/repos/dist/release/hadoop/common/${release}"/>
+    <property name="production.commit.msg" value="${jira.id}. Releasing Hadoop 
${hadoop.version}" />
 
-  <target name="init">
+    <property name="bin-untar.dir" location="target/bin-untar/${release}"/>
 
+    <!-- exec() where failures fail the build. -->
     <presetdef name="x">
       <exec failonerror="true"/>
     </presetdef>
@@ -131,43 +166,35 @@
     <!-- require a dir to exist. -->
     <macrodef name="require-dir">
       <attribute name="path" />
-        <sequential>
-          <fail message="dir missing: @{path}">
-            <condition>
-              <not>
-                <available file="@{path}"/>
-              </not>
-            </condition>
-          </fail>
-        </sequential>
+      <sequential>
+        <fail message="dir missing: @{path}">
+          <condition>
+            <not>
+              <available file="@{path}"/>
+            </not>
+          </condition>
+        </fail>
+      </sequential>
     </macrodef>
 
     <!-- require a file to exist -->
     <macrodef name="require-file">
       <attribute name="path" />
-        <sequential>
-          <fail message="file missing: @{path}">
-            <condition>
-              <not>
-                <available file="@{path}"/>
-              </not>
-            </condition>
-          </fail>
-        </sequential>
+      <sequential>
+        <fail message="file missing: @{path}">
+          <condition>
+            <not>
+              <available file="@{path}"/>
+            </not>
+          </condition>
+        </fail>
+      </sequential>
     </macrodef>
 
     <presetdef name="verify-release-dir">
       <require-dir path="${release.dir}" />
     </presetdef>
 
-    <macrodef name="require">
-      <attribute name="p" />
-        <sequential>
-          <fail unless="@{p}" message="unset property @{p}" />
-        </sequential>
-    </macrodef>
-
-
     <mkdir dir="${downloads.dir}"/>
 
     <property name="scp.source"
@@ -184,7 +211,7 @@
 
       Fetching and validating artifacts in ${release.dir}
       release.dir=${release.dir}
-      nexus.staging.url=${nexus.staging.url}
+      asf.staging.url=${asf.staging.url}
 
       scp.source=${scp.source}
       http.source=${http.source}
@@ -205,6 +232,13 @@
     </echo>
   </target>
 
+  <!--
+  This is just here to stop problems if you double type ant.
+  -->
+  <target name="ant">
+    <echo>duplicate ant on the command line</echo>
+  </target>
+  
   <target name="clean"
     description="clean up target/ dir">
     <!-- Delete the ${dist} directory trees -->
@@ -212,10 +246,6 @@
     <delete dir="${downloads.dir}"/>
   </target>
 
-  <target name="ant">
-    <echo>duplicate ant on the command line</echo>
-  </target>
-
   <target name="purge-from-maven" depends="init"
     description="purge all artifacts from the maven repo">
     <property name="mvn.repo"
@@ -339,10 +369,14 @@
 
   </target>
 
+  <!-- ========================================================= -->
+  <!--  Staging operations  -->
+  <!-- ========================================================= -->
+
   <target name="stage" depends="init"
     description="move the RC to the svn staging dir">
 
-    <fail message="unset: staging.dir" unless="staging.dir"/>
+    <require p="staging.dir"/>
 
     <echo>moving ${release.dir} to ${staging.dir}</echo>
     <move
@@ -364,8 +398,8 @@
   <target name="staging-init"
     description="init svn staging"
     depends="init">
-    <fail unless="jira.id"/>
-    <fail unless="git.commit.id"/>
+    <require p="jira.id"/>
+    <require p="git.commit.id"/>
     <echo>
       staging.commit.msg = ${staging.commit.msg}
       production.commit.msg = ${production.commit.msg}
@@ -490,8 +524,8 @@
       Set the git commit number in git.commit.id
     </fail>
 
-    <fail unless="nexus.staging.url">
-      Set the nexus staging repository URL in nexus.staging.url
+    <fail unless="asf.staging.url">
+      Set the apache staging URL in asf.staging.url
     </fail>
 
     <loadfile property="message.txt"
@@ -512,8 +546,30 @@ Message is in file ${message.out}
 
   </target>
 
+  <!-- ========================================================= -->
+  <!-- Spark -->
+  <!-- ========================================================= -->
+
+
+  <!--
+  spark-specific init options etc
+  -->
+  <target name="spark.init" if="spark.dir"
+    depends="init">
+
+
+    <!-- for spark builds -->
+    <property name="spark.version" value="3.5.0-SNAPSHOT"/>
+    <!--  spark excludes hadoop-aws dependency and forces in their own
+          this fixes it to be in sync with hadoop
+          see https://issues.apache.org/jira/browse/SPARK-39969
+     -->
+    <property name="spark.aws.version" value="1.12.316"/>
+
+  </target>
+
   <target name="spark.build" if="spark.dir"
-    depends="init"
+    depends="spark.init"
     description="build the spark release in spark.dir">
     <echo>
 
@@ -601,6 +657,10 @@ Message is in file ${message.out}
     </mvn>
   </target>
 
+  <!-- ========================================================= -->
+  <!-- Google GCS. Java 11+ -->
+  <!-- ========================================================= -->
+
 
   <target name="gcs.build" if="bigdata-interop.dir"
     depends="init"
@@ -638,6 +698,12 @@ Message is in file ${message.out}
     </mvn>
   </target>
 
+  <!-- ========================================================= -->
+  <!-- HBoss: hbase filesystem wrapper -->
+  <!-- uses aws sdk APIs for its mocking -->
+  <!-- ========================================================= -->
+
+
   <target name="hboss.build" if="hboss.dir"
     depends="init"
     description="Build the hboss artifacts">
@@ -658,6 +724,7 @@ Message is in file ${message.out}
       <arg value="-DskipTests"/>
     </mvn>
   </target>
+
   <target name="hboss.test" if="hboss.dir"
     depends="init"
     description="Build and test the hboss artifacts">
@@ -679,6 +746,10 @@ Message is in file ${message.out}
     </mvn>
   </target>
 
+  <!-- ========================================================= -->
+  <!-- https://github.com/steveloughran/cloudstore  -->
+  <!-- ========================================================= -->
+
 
   <target name="cloudstore.build" if="cloudstore.dir"
     depends="init"
@@ -688,15 +759,22 @@ Message is in file ${message.out}
       if this is done with java11, it shouldn't be released.
 
     </echo>
+    <require p="cloudstore.profile"/>
+
     <mvn dir="${cloudstore.dir}">
       <arg value="-Psnapshots-and-staging"/>
-      <arg value="-Pextra"/>
+      <arg value="-P${cloudstore.profile}"/>
       <arg value="-Dhadoop.version=${hadoop.version}"/>
       <arg value="clean"/>
       <arg value="package"/>
+      <arg value="test"/>
     </mvn>
   </target>
 
+  <!-- ========================================================= -->
+  <!-- Hadoop FS API shim WiP -->
+  <!-- ========================================================= -->
+
 
   <target name="fsapi.test" if="fs-api-shim.dir"
     depends="init"
@@ -714,6 +792,10 @@ Message is in file ${message.out}
     </mvn>
   </target>
 
+  <!-- ========================================================= -->
+  <!-- Parquet -->
+  <!-- ========================================================= -->
+
   <target name="parquet.build" if="parquet.dir"
     depends="init"
     description="Build parquet">
@@ -725,8 +807,8 @@ Message is in file ${message.out}
     <mvn dir="${parquet.dir}">
       <arg value="-Dhadoop.version=${hadoop.version}"/>
       <arg value="-Pasf-staging"/>
-      <arg value="--pl"/>
-      <arg value="parquet-hadoop"/>
+<!--      <arg value="&#45;&#45;pl"/>
+      <arg value="parquet-hadoop"/>-->
       <arg value="clean"/>
       <arg value="install"/>
       <arg value="-DskipTests"/>
@@ -734,7 +816,7 @@ Message is in file ${message.out}
   </target>
 
   <target name="parquet.test" if="parquet.dir"
-    depends="init"
+    depends="parquet.build"
     description="Build and test the parquet-hadoop module">
     <echo>
       Build and test parquet-hadoop.
@@ -749,12 +831,18 @@ Message is in file ${message.out}
     </mvn>
   </target>
 
+  <!-- ========================================================= -->
+  <!-- Avro. Needs lots of jdk installations. -->
+  <!-- ========================================================= -->
+
   <target name="avro.build" if="avro.dir"
     depends="init"
     description="Build avro">
     <echo>
       Build avro.
-      Relies on the user having an asf-staging profile.
+      * Relies on the user having an asf-staging profile.
+      * needs a java8 toolchain set up
+        https://maven.apache.org/guides/mini/guide-using-toolchains.html
     </echo>
     <mvn dir="${avro.dir}/lang/java">
       <arg value="-Dhadoop.version=${hadoop.version}"/>
@@ -765,6 +853,10 @@ Message is in file ${message.out}
     </mvn>
   </target>
 
+  <!-- ========================================================= -->
+  <!-- Release targets -->
+  <!-- ========================================================= -->
+
 
   <!--  Fetch the artifacts from an http repo, for validating someone else's 
release.
    the download is into incoming.dir, then after a cleanup copied into 
release.dir; -->
@@ -798,6 +890,16 @@ Message is in file ${message.out}
     <echo>copied http downloaded artifacts to ${release.dir}</echo>
   </target>
 
+  <target name="release.fetch.arm" depends="init"
+    description="fetch the arm artifacts from a remote http site">
+    <fail unless="http.source"/>
+    <mkdir dir="${incoming.dir}"/>
+    <get src="${http.source}/${arm.binary.filename}"
+         dest="${incoming.dir}"
+         verbose="true"
+         usetimestamp="true"/>
+  </target>
+
   <target name="release.site.untar" depends="release.dir.check"
     description="untar the release site">
     <echo>untarring site ${release.dir}/${release}-site.tar.gz</echo>
diff --git a/pom.xml b/pom.xml
index a21de82..13dffbc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,7 +37,7 @@
     <file.encoding>UTF-8</file.encoding>
     <javac.version>1.8</javac.version>
     <enforced.java.version>${javac.version}</enforced.java.version>
-    <maven-antrun-plugin.version>1.7</maven-antrun-plugin.version>
+    <maven-antrun-plugin.version>3.1.0</maven-antrun-plugin.version>
 
 
     <hadoop.version>3.4.0</hadoop.version>
@@ -278,48 +278,69 @@
         <!-- clean lifecycle, see 
https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle 
-->
         <plugin>
           <artifactId>maven-clean-plugin</artifactId>
-          <version>3.1.0</version>
+          <version>3.3.2</version>
         </plugin>
         <!-- default lifecycle, jar packaging: see 
https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging
 -->
         <plugin>
           <artifactId>maven-resources-plugin</artifactId>
-          <version>3.0.2</version>
+          <version>3.3.1</version>
         </plugin>
         <plugin>
           <artifactId>maven-compiler-plugin</artifactId>
-          <version>3.8.0</version>
+          <version>3.12.1</version>
         </plugin>
         <plugin>
           <artifactId>maven-surefire-plugin</artifactId>
-          <version>2.22.1</version>
+          <version>3.2.5</version>
         </plugin>
         <plugin>
           <artifactId>maven-jar-plugin</artifactId>
-          <version>3.0.2</version>
+          <version>3.3.0</version>
         </plugin>
         <plugin>
           <artifactId>maven-install-plugin</artifactId>
-          <version>2.5.2</version>
+          <version>3.1.1</version>
         </plugin>
         <plugin>
           <artifactId>maven-deploy-plugin</artifactId>
-          <version>2.8.2</version>
+          <version>3.1.1</version>
         </plugin>
         <!-- site lifecycle, see 
https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle 
-->
         <plugin>
           <artifactId>maven-site-plugin</artifactId>
-          <version>3.7.1</version>
+          <version>4.0.0-M13</version>
         </plugin>
         <plugin>
           <artifactId>maven-project-info-reports-plugin</artifactId>
-          <version>3.0.0</version>
+          <version>3.5.0</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-antrun-plugin</artifactId>
           <version>${maven-antrun-plugin.version}</version>
         </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-enforcer-plugin</artifactId>
+          <version>3.4.1</version>
+          <executions>
+            <execution>
+              <id>enforce-maven</id>
+              <goals>
+                <goal>enforce</goal>
+              </goals>
+              <configuration>
+                <rules>
+                  <requireMavenVersion>
+                    <version>3.4.0</version>
+                  </requireMavenVersion>
+                </rules>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
       </plugins>
+
     </pluginManagement>
   </build>
 
diff --git a/release.properties b/release.properties
new file mode 100644
index 0000000..bb7dbf4
--- /dev/null
+++ b/release.properties
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# File to git-managed release info.
+release.property.file=src/releases/release-3.4.0.properties
\ No newline at end of file
diff --git a/src/releases/release-3.4.0.properties 
b/src/releases/release-3.4.0.properties
new file mode 100644
index 0000000..f874372
--- /dev/null
+++ b/src/releases/release-3.4.0.properties
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# property file for 3.4.0
+jira.id=HADOOP-19018
+jira.title=Release 3.4.0
+hadoop.version=3.4.0
+rc=RC2
+previous.version=3.3.6
+release.branch=3.4
+git.commit.id=88fbe62f27e
+amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.4.0-RC2/
+arm.src.dir=${amd.src.dir}
+http.source=${amd.src.dir}
+
+release.short=hadoop-${hadoop.version}
+release=hadoop-${hadoop.version}
+rc.dirname=${release}
+
+staging.url=https://repository.apache.org/content/repositories/orgapachehadoop-1402
+
+cloudstore.profile=sdk2
+
diff --git a/src/text/core-announcement.txt b/src/text/core-announcement.txt
index 524f5db..683af9f 100644
--- a/src/text/core-announcement.txt
+++ b/src/text/core-announcement.txt
@@ -15,14 +15,14 @@ Key changes include
   release -and therefore the maven artifacts.
 * Security fixes in Hadoop's own code.
 
-Users of Apache Hadoop ${previous.ver} and earlier should upgrade to
+Users of Apache Hadoop ${previous.version} and earlier should upgrade to
 this release.
 
 All users are encouraged to read the [overview of major changes][1]
-since release ${previous.ver}.
+since release ${previous.version}.
 
 For details of bug fixes, improvements, and other enhancements since
-the previous ${previous.ver} release, please check [release notes][2]
+the previous ${previous.version} release, please check [release notes][2]
 and [changelog][3].
 
 


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to