This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hadoop-release-support.git
The following commit(s) were added to refs/heads/main by this push:
     new 7a421af  HADOOP-19565. Release Hadoop 3.4.2 RC3
7a421af is described below

commit 7a421af7439c3a81d9e8dd084ef9f290d2cbd10e
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Tue Aug 26 10:24:41 2025 +0100

    HADOOP-19565. Release Hadoop 3.4.2 RC3

    Ant targets to copy core-site.xml and auth-keys.xml files
    into position for testing
---
 README.md                                  | 25 +++++++++++--
 build.xml                                  | 59 ++++++++++++++++--------------
 src/releases/release-info-3.4.2.properties |  6 +--
 3 files changed, 57 insertions(+), 33 deletions(-)

diff --git a/README.md b/README.md
index 0db9008..453ed14 100644
--- a/README.md
+++ b/README.md
@@ -533,7 +533,7 @@ If you don't yet trust the key of whoever signed the release then
 2. Perform whatever key verification you can and sign the key at that level
    -ideally push up the signature to the servers.
 
-### Untar source and build.
+### Untar source and build
 
 This puts the built artifacts into the local maven repo so
 do not do this while building/testing downstream projects
@@ -612,6 +612,17 @@ the binaries are not present).
 
 The ant build itself will succeed, even if the `checknative` command reports a failure.
 
+## Cloud connector integration tests
+
+To test the cloud connectors, the relevant credentials must be copied into
+each module's `src/test/resources` subdirectory, as covered in the testing
+documentation of each component.
+
+The location of this credentials file must be set in the property `auth-keys.xml`:
+
+```properties
+auth-keys.xml=/home/alice/private/xml/auth-keys.xml
+```
+
 ## Testing ARM binaries
 
 There are ARM variants of the commands to fetch and validate the ARM binaries.
@@ -713,15 +724,23 @@ ant cloudstore.build
 
 * This is java 11+ only.
 
-Ideally, you should run the tests, or even better, run them before the RC is up for review.
+Ideally, you should run the tests, or even better, run them before the RC is up
+for review, so as to identify which failures are actually regressions.
 
-Building the libraries.
+### Building the GCS library
 
 Do this only if you aren't running the tests.
 
 ```bash
 ant gcs.build
 ```
 
+### Testing the GCS library
+
+Requires the source tree to be set up for test runs, including login credentials.
+
+```bash
+ant gcs.test
+```
+
 ## Build Apache Spark
 
 Validates hadoop client artifacts; the cloud tests cover hadoop cloud storage clients.
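For reference, the `auth-keys.xml` credentials file named above is an ordinary Hadoop
configuration file. A minimal sketch for the S3A connector follows, with placeholder
values; the property names follow the hadoop-aws testing documentation, which remains
the authoritative reference:

```xml
<configuration>
  <!-- test bucket used by the hadoop-aws integration tests (placeholder) -->
  <property>
    <name>test.fs.s3a.name</name>
    <value>s3a://example-test-bucket/</value>
  </property>
  <!-- static credentials; other S3A authentication mechanisms work too -->
  <property>
    <name>fs.s3a.access.key</name>
    <value>YOUR-ACCESS-KEY</value>
  </property>
  <property>
    <name>fs.s3a.secret.key</name>
    <value>YOUR-SECRET-KEY</value>
  </property>
</configuration>
```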
diff --git a/build.xml b/build.xml
index 96eda40..ce473bd 100644
--- a/build.xml
+++ b/build.xml
@@ -27,7 +27,7 @@
   * src/releases/${release.info.file}: the actual release properties
     which contains things like git tag, release and RC version etc. SCM-managed.
-
+
   hadoop version is set in the property hadoop.version
   build.properties is required to set source of RC tarball
@@ -114,7 +114,7 @@
   -->
   <!--suppress AntResolveInspection -->
   <property file="build.properties"/>
-
+
   <!--
   Load the release.properties file
   This MUST contain a property "release.version" which is matched
@@ -132,7 +132,7 @@
   Validate the settings, build the ant properties, define macros etc.
   -->
-
+
   <target name="init"
     description="initialize">
 
     <!-- base name of a release -->
@@ -217,7 +217,9 @@
     <!-- version of the AWS SDK to copy around -->
     <set name="aws.sdk2.version" value="2.29.52"/>
     <setpath name="mvn.dependencies.dir" location="target/dependency" />
-
+    <setpath name="src.build.dir" location="${release.source.dir}/${release}-src"/>
+    <setpath name="aws.src.build.dir" location="${src.build.dir}/hadoop-tools/hadoop-aws"/>
+    <setpath name="azure.src.build.dir" location="${src.build.dir}/hadoop-tools/hadoop-azure"/>
 
     <echo>
       subsidiary build options only if explicitly set
@@ -349,14 +351,18 @@
     <presetdef name="hadoop">
       <exec failonerror="true"
             executable="bin/hadoop"
-            dir="${hadoop.home}" />
+            dir="${hadoop.home}" >
+        <env key="HADOOP_OPTIONAL_TOOLS" value="hadoop-azure,hadoop-aws" />
+      </exec>
     </presetdef>
 
     <!-- quiet hadoop-->
     <presetdef name="hadoopq">
       <exec failonerror="false"
             executable="bin/hadoop"
-            dir="${hadoop.home}" />
+            dir="${hadoop.home}" >
+        <env key="HADOOP_OPTIONAL_TOOLS" value="hadoop-azure,hadoop-aws" />
+      </exec>
     </presetdef>
 
   </target>
@@ -366,7 +372,7 @@
   <target name="ant">
     <echo>duplicate ant on the command line</echo>
   </target>
-
+
   <target name="clean"
     description="clean up target/ dir">
     <!-- Delete the ${dist} directory trees -->
@@ -933,7 +939,7 @@ Message is in file ${message.out}
 
   </target>
 
-  <target name="gi" if="bigdata-interop.dir"
+  <target name="gcs.test" if="bigdata-interop.dir"
     depends="init"
     description="Build and test the google gcs artifacts">
     <echo>
@@ -1259,39 +1265,38 @@ Message is in file ${message.out}
       todir="${hadoop.tools.lib.dir}" />
   </target>
 
-  <target name="s3a.commands" depends="init"
-    description="run s3a commands">
-    <setpath name="s3guard.out.file" location="${target}/s3guard.txt"/>
+  <!-- copy the core-site.xml file from a location -->
+  <target name="hadoop.core-site.xml" depends="init" >
+    <require p="core-site.xml"/>
+    <copy file="${core-site.xml}" todir="${hadoop.home}/etc/hadoop" />
+  </target>
 
-    <!-- this doesn't work unless we get s3a onto the default cp (which we should) -->
-<!--
+  <target name="s3a.commands" depends="s3a.copy.sdk"
+    description="run s3a commands">
     <hadoop>
       <arg value="org.apache.hadoop.util.FindClass"/>
       <arg value="locate"/>
       <arg value="software.amazon.awssdk.auth.credentials.AwsCredentialsProvider"/>
     </hadoop>
-    -->
+
     <!--
     Without auth this will fail, so look for the auth failure string.
    Of course, this means that on a host with auth (env vars, instance
    metadata) this check will fail. Whoever encounters that problem
    gets to fix this.
    -->
-    <hadoopq output="${s3guard.out.file}">
+    <hadoop>
       <arg value="s3guard" />
       <arg value="bucket-info" />
       <arg value="s3a://noaa-cors-pds/" />
-    </hadoopq>
-    <loadfile property="s3guard-out" srcFile="${s3guard.out.file}"/>
-    <echo>${s3guard-out}</echo>
-    <fail message="s3guard failed for a reason other than auth">
-      <condition>
-        <not>
-          <contains string="${s3guard-out}" substring="NoAuthWithAWSException"/>
-        </not>
-      </condition>
-    </fail>
+    </hadoop>
+
+  </target>
 
+  <!-- copy the auth-keys.xml file to the test dir -->
+  <target name="aws.auth-keys.xml" depends="init" >
+    <require p="auth-keys.xml"/>
+    <copy file="${auth-keys.xml}" todir="${aws.src.build.dir}/src/test/resources" />
   </target>
 
@@ -1386,7 +1391,7 @@ ${lean.asc}
       Binary release expanded into target/arm-untar/${release}
     </echo>
   </target>
-
+
   <target name="release.arm.commands" depends="init"
     description="run test hadoop commands ">
@@ -1546,7 +1551,7 @@ ${lean.asc}
 
     <echo>${user-email-announcement.txt}</echo>
     <echo file="${user-email-announcement.out}">${user-email-announcement.txt}</echo>
-
+
   </target>
 
   <target name="release.site.docs"
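For anyone trying the new copy targets, a sketch of how they might be invoked;
the paths are illustrative, and the `core-site.xml` and `auth-keys.xml`
properties can equally be set in `build.properties`:

```bash
# copy a core-site.xml into ${hadoop.home}/etc/hadoop
ant hadoop.core-site.xml -Dcore-site.xml=/home/alice/private/xml/core-site.xml

# copy an auth-keys.xml into hadoop-tools/hadoop-aws/src/test/resources
# of the expanded source tree
ant aws.auth-keys.xml -Dauth-keys.xml=/home/alice/private/xml/auth-keys.xml
```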
diff --git a/src/releases/release-info-3.4.2.properties b/src/releases/release-info-3.4.2.properties
index 5f30cfe..a82d6a8 100644
--- a/src/releases/release-info-3.4.2.properties
+++ b/src/releases/release-info-3.4.2.properties
@@ -16,11 +16,11 @@
 # property file for 3.4.2
 
 hadoop.version=3.4.2
-rc=RC2
+rc=RC3
 category=-lean
 previous.version=3.4.1
 release.branch=3.4.2
-git.commit.id=e1c0dee881820a4d834ec4a4d2c70d0d953bb933
+git.commit.id=84e8b89ee2ebe6923691205b9e171badde7a495c
 
 aws.sdk2.version=2.29.52
 jira.id=HADOOP-19565
@@ -29,5 +29,5 @@ jira.title=Release Hadoop 3.4.2
 amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/3.4.2-RC2
 arm.src.dir=${amd.src.dir}
 http.source=${amd.src.dir}
-asf.staging.url=https://repository.apache.org/content/repositories/orgapachehadoop-1437
+asf.staging.url=https://repository.apache.org/content/repositories/orgapachehadoop-1443

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org