Repository: hadoop

Updated Branches:
  refs/heads/branch-2   54803ebe4 -> 762125b86
  refs/heads/branch-2.9 39e1f963b -> 6024b3a2a
Backport HADOOP-13514 (surefire upgrade) to branch-2


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/762125b8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/762125b8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/762125b8

Branch: refs/heads/branch-2
Commit: 762125b864ab812512bad9a59344ca79af7f43ac
Parents: 54803eb
Author: Chris Douglas <[email protected]>
Authored: Mon Feb 26 16:32:06 2018 -0800
Committer: Chris Douglas <[email protected]>
Committed: Mon Feb 26 16:32:06 2018 -0800

----------------------------------------------------------------------
 BUILDING.txt                      | 4 ++--
 hadoop-project/pom.xml            | 5 +++--
 hadoop-tools/hadoop-aws/pom.xml   | 2 ++
 hadoop-tools/hadoop-azure/pom.xml | 3 +++
 4 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/762125b8/BUILDING.txt
----------------------------------------------------------------------
diff --git a/BUILDING.txt b/BUILDING.txt
index 9c1fbd6..e7701a5 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -267,10 +267,10 @@ If the build process fails with an out of memory error, you should be able to fi
 it by increasing the memory used by maven which can be done via the environment
 variable MAVEN_OPTS.
 
-Here is an example setting to allocate between 256 and 512 MB of heap space to
+Here is an example setting to allocate between 256 MB and 1 GB of heap space to
 Maven
 
-export MAVEN_OPTS="-Xms256m -Xmx512m"
+export MAVEN_OPTS="-Xms256m -Xmx1g"
 
 ----------------------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hadoop/blob/762125b8/hadoop-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 6ba1ced..f560088 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -111,7 +111,7 @@
     <!-- Plugin versions and config -->
     <maven-surefire-plugin.argLine>-Xmx2048m -XX:MaxPermSize=768m -XX:+HeapDumpOnOutOfMemoryError</maven-surefire-plugin.argLine>
-    <maven-surefire-plugin.version>2.17</maven-surefire-plugin.version>
+    <maven-surefire-plugin.version>2.20.1</maven-surefire-plugin.version>
     <maven-surefire-report-plugin.version>${maven-surefire-plugin.version}</maven-surefire-report-plugin.version>
     <maven-failsafe-plugin.version>${maven-surefire-plugin.version}</maven-failsafe-plugin.version>
@@ -1521,6 +1521,7 @@
             <DYLD_LIBRARY_PATH>${env.DYLD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib</DYLD_LIBRARY_PATH>
             <MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX>
           </environmentVariables>
+          <trimStackTrace>false</trimStackTrace>
           <systemPropertyVariables>
 
             <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
@@ -1531,7 +1532,7 @@
             <test.build.data>${test.build.data}</test.build.data>
             <test.build.webapps>${test.build.webapps}</test.build.webapps>
             <test.cache.data>${test.cache.data}</test.cache.data>
-            <test.build.classes>${test.build.classes}</test.build.classes>
+            <test.build.classes>${project.build.directory}/test-classes</test.build.classes>
 
             <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
             <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/762125b8/hadoop-tools/hadoop-aws/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/pom.xml b/hadoop-tools/hadoop-aws/pom.xml
index d8b54c4..659af93 100644
--- a/hadoop-tools/hadoop-aws/pom.xml
+++ b/hadoop-tools/hadoop-aws/pom.xml
@@ -153,6 +153,7 @@
                 <reuseForks>false</reuseForks>
                 <argLine>${maven-surefire-plugin.argLine} -DminiClusterDedicatedDirs=true</argLine>
                 <forkedProcessTimeoutInSeconds>${fs.s3a.scale.test.timeout}</forkedProcessTimeoutInSeconds>
+                <trimStackTrace>false</trimStackTrace>
                 <systemPropertyVariables>
                   <!-- Tell tests that they are being executed in parallel -->
                   <test.parallel.execution>true</test.parallel.execution>
@@ -209,6 +210,7 @@
                 </goals>
                 <configuration>
                   <forkedProcessTimeoutInSeconds>${fs.s3a.scale.test.timeout}</forkedProcessTimeoutInSeconds>
+                  <trimStackTrace>false</trimStackTrace>
                   <systemPropertyVariables>
                     <!-- Tell tests that they are being executed sequentially -->
                     <test.parallel.execution>false</test.parallel.execution>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/762125b8/hadoop-tools/hadoop-azure/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/pom.xml b/hadoop-tools/hadoop-azure/pom.xml
index 244f432..c9325ff 100644
--- a/hadoop-tools/hadoop-azure/pom.xml
+++ b/hadoop-tools/hadoop-azure/pom.xml
@@ -326,6 +326,7 @@
                 <reuseForks>false</reuseForks>
                 <argLine>${maven-surefire-plugin.argLine} -DminiClusterDedicatedDirs=true</argLine>
                 <forkedProcessTimeoutInSeconds>${fs.azure.scale.test.timeout}</forkedProcessTimeoutInSeconds>
+                <trimStackTrace>false</trimStackTrace>
                 <systemPropertyVariables>
                   <!-- Tell tests that they are being executed in parallel -->
                   <test.parallel.execution>true</test.parallel.execution>
@@ -381,6 +382,7 @@
                 </goals>
                 <configuration>
                   <forkedProcessTimeoutInSeconds>${fs.azure.scale.test.timeout}</forkedProcessTimeoutInSeconds>
+                  <trimStackTrace>false</trimStackTrace>
                   <systemPropertyVariables>
                     <test.parallel.execution>false</test.parallel.execution>
                     <fs.azure.scale.test.enabled>${fs.azure.scale.test.enabled}</fs.azure.scale.test.enabled>
@@ -431,6 +433,7 @@
                     <fs.azure.scale.test.timeout>${fs.azure.scale.test.timeout}</fs.azure.scale.test.timeout>
                   </systemPropertyVariables>
                   <forkedProcessTimeoutInSeconds>${fs.azure.scale.test.timeout}</forkedProcessTimeoutInSeconds>
+                  <trimStackTrace>false</trimStackTrace>
                 </configuration>
              </execution>
            </executions>

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
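
For context: trimStackTrace is a standard maven-surefire-plugin option (default true); setting it to false keeps the full stack trace for failing tests instead of surefire's trimmed one. The sketch below shows, roughly, how a module inheriting from hadoop-project would pick up the settings touched by this commit; the plugin block itself is illustrative only and is not part of the diff, and the property names are the ones defined in hadoop-project/pom.xml above.

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <!-- Resolves to 2.20.1 after this change, via hadoop-project/pom.xml -->
        <version>${maven-surefire-plugin.version}</version>
        <configuration>
          <!-- Print full stack traces for failing tests rather than trimmed ones -->
          <trimStackTrace>false</trimStackTrace>
          <!-- Forked test JVM options defined in the parent pom -->
          <argLine>${maven-surefire-plugin.argLine}</argLine>
        </configuration>
      </plugin>

Combined with the surefire upgrade from 2.17 to 2.20.1 and the larger suggested MAVEN_OPTS heap in BUILDING.txt, this should make forked-test failures on branch-2 easier to diagnose.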
