This is an automated email from the ASF dual-hosted git repository.
roryqi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-uniffle.git
The following commit(s) were added to refs/heads/master by this push:
new 889f67e2 [#957] feat(tez): Tez examples integration test (#982)
889f67e2 is described below
commit 889f67e21a50a69d7da921f8eebb0100b8976ff7
Author: zhengchenyu <[email protected]>
AuthorDate: Wed Jul 12 09:57:14 2023 +0800
[#957] feat(tez): Tez examples integration test (#982)
### What changes were proposed in this pull request?
Add integration test for tez
### Why are the changes needed?
To construct tez test pipeline, we need integration test.
### How was this patch tested?
integration test.
---
.github/workflows/parallel.yml | 1 +
common/pom.xml | 18 --
integration-test/common/pom.xml | 12 -
integration-test/tez/pom.xml | 181 ++++++++++++
.../uniffle/test/TezCartesianProductTest.java | 77 +++++
.../org/apache/uniffle/test/TezHashJoinTest.java | 63 ++++
.../uniffle/test/TezIntegrationTestBase.java | 323 +++++++++++++++++++++
.../uniffle/test/TezJoinIntegrationTestBase.java | 68 +++++
.../uniffle/test/TezOrderedWordCountTest.java | 89 ++++++
.../uniffle/test/TezSimpleSessionExampleTest.java | 129 ++++++++
.../apache/uniffle/test/TezSortMergeJoinTest.java | 48 +++
.../org/apache/uniffle/test/TezWordCountTest.java | 68 +++++
pom.xml | 145 +++++++--
server/pom.xml | 12 -
storage/pom.xml | 12 -
15 files changed, 1174 insertions(+), 72 deletions(-)
diff --git a/.github/workflows/parallel.yml b/.github/workflows/parallel.yml
index 059854e3..132b56db 100644
--- a/.github/workflows/parallel.yml
+++ b/.github/workflows/parallel.yml
@@ -61,6 +61,7 @@ jobs:
- mr-hadoop2.8
- mr-hadoop3.2
- tez
+ - tez-hadoop3.2
fail-fast: false
name: -P${{ matrix.profile }}
steps:
diff --git a/common/pom.xml b/common/pom.xml
index dc20d6c9..9e2a1b57 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -161,22 +161,4 @@
</plugin>
</plugins>
</build>
-
- <profiles>
- <profile>
- <id>hadoop3.2</id>
- <dependencies>
- <dependency>
- <groupId>org.bouncycastle</groupId>
- <artifactId>bcprov-jdk15on</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
- </profile>
- </profiles>
</project>
diff --git a/integration-test/common/pom.xml b/integration-test/common/pom.xml
index 952774a8..78975bd4 100644
--- a/integration-test/common/pom.xml
+++ b/integration-test/common/pom.xml
@@ -175,16 +175,4 @@
</plugins>
</pluginManagement>
</build>
- <profiles>
- <profile>
- <id>hadoop3.2</id>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
- </profile>
- </profiles>
</project>
diff --git a/integration-test/tez/pom.xml b/integration-test/tez/pom.xml
new file mode 100644
index 00000000..47a8b33e
--- /dev/null
+++ b/integration-test/tez/pom.xml
@@ -0,0 +1,181 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one or more
+ ~ contributor license agreements. See the NOTICE file distributed with
+ ~ this work for additional information regarding copyright ownership.
+ ~ The ASF licenses this file to You under the Apache License, Version 2.0
+ ~ (the "License"); you may not use this file except in compliance with
+ ~ the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <artifactId>uniffle-parent</artifactId>
+ <groupId>org.apache.uniffle</groupId>
+ <version>0.8.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+
+ <groupId>org.apache.uniffle</groupId>
+ <artifactId>rss-integration-tez-test</artifactId>
+ <version>0.8.0-SNAPSHOT</version>
+ <packaging>jar</packaging>
+ <name>Apache Uniffle Integration Test (Tez)</name>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.uniffle</groupId>
+ <artifactId>rss-client-tez</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.uniffle</groupId>
+ <artifactId>rss-integration-common-test</artifactId>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.uniffle</groupId>
+ <artifactId>shuffle-storage</artifactId>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.uniffle</groupId>
+ <artifactId>shuffle-server</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.uniffle</groupId>
+ <artifactId>coordinator</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-dag</artifactId>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-tests</artifactId>
+ <type>test-jar</type>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-minicluster</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-common</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <!-- tez-0.9.1 depends on guava 11.0.2, is not compatibility with uniffle
and hadoop-3.2.1 (guava-27.0-jre) until
+ TEZ-4124. We must choose an internal version guava-21.0 so that we
can test. -->
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>21.0</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemProperties>
+ <java.awt.headless>true</java.awt.headless>
+ <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
+ <project.version>${project.version}</project.version>
+ </systemProperties>
+
<redirectTestOutputToFile>${test.redirectToFile}</redirectTestOutputToFile>
+ <useFile>${test.redirectToFile}</useFile>
+ <argLine>-ea -Xmx3g</argLine>
+ <failIfNoTests>false</failIfNoTests>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.4</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.jacoco</groupId>
+ <artifactId>jacoco-maven-plugin</artifactId>
+ <executions>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezCartesianProductTest.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezCartesianProductTest.java
new file mode 100644
index 00000000..f807feb6
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezCartesianProductTest.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.Tool;
+import org.apache.tez.examples.CartesianProduct;
+import org.junit.jupiter.api.Test;
+
+public class TezCartesianProductTest extends TezIntegrationTestBase {
+
+ private String inputPath1 = "cartesian_product_input1";
+ private String inputPath2 = "cartesian_product_input2";
+ private String inputPath3 = "cartesian_product_input3";
+ private String outputPath = "cartesian_product_output";
+
+ @Test
+ public void cartesianProductTest() throws Exception {
+ generateInputFile();
+ run();
+ }
+
+ private void generateInputFile() throws Exception {
+ FSDataOutputStream outputStream1 = fs.create(new Path(inputPath1));
+ FSDataOutputStream outputStream2 = fs.create(new Path(inputPath2));
+ FSDataOutputStream outputStream3 = fs.create(new Path(inputPath3));
+ for (int i = 0; i < 500; i++) {
+ String alphanumeric = RandomStringUtils.randomAlphanumeric(5);
+ String numeric = RandomStringUtils.randomNumeric(5);
+ outputStream1.writeBytes(alphanumeric + "\n");
+ outputStream2.writeBytes(numeric + "\n");
+ if (i % 2 == 0) {
+ outputStream3.writeBytes(alphanumeric + "\n");
+ }
+ }
+ outputStream1.close();
+ outputStream2.close();
+ outputStream3.close();
+ }
+
+ @Override
+ public Tool getTestTool() {
+ return new CartesianProduct();
+ }
+
+ @Override
+ public String[] getTestArgs(String uniqueOutputName) {
+ return new String[] {"-partitioned", inputPath1, inputPath2, inputPath3,
outputPath + "/" + uniqueOutputName};
+ }
+
+ @Override
+ public String getOutputDir(String uniqueOutputName) {
+ return outputPath + "/" + uniqueOutputName;
+ }
+
+ @Override
+ public void verifyResults(String originPath, String rssPath) throws
Exception {
+ verifyResultsSameSet(originPath, rssPath);
+ }
+}
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezHashJoinTest.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezHashJoinTest.java
new file mode 100644
index 00000000..56cc5555
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezHashJoinTest.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.Tool;
+import org.apache.tez.examples.HashJoinExample;
+import org.junit.jupiter.api.Test;
+
+public class TezHashJoinTest extends TezJoinIntegrationTestBase {
+
+ private static final String HASH_JOIN_OUTPUT_PATH = "hash_join_output";
+
+ @Test
+ public void hashJoinTest() throws Exception {
+ generateInputFile();
+ fs.delete(new Path(HASH_JOIN_OUTPUT_PATH), true);
+ run(getTestArgs(HASH_JOIN_OUTPUT_PATH));
+ }
+
+ @Test
+ public void hashJoinDoBroadcastTest() throws Exception {
+ generateInputFile();
+ String[] orignal = getTestArgs(HASH_JOIN_OUTPUT_PATH);
+ String[] args = new String[orignal.length + 1];
+ for (int i = 0; i < orignal.length; i++) {
+ args[i] = orignal[i];
+ }
+ args[orignal.length] = "doBroadcast";
+ fs.delete(new Path(HASH_JOIN_OUTPUT_PATH), true);
+ run(args);
+ }
+
+ @Override
+ public Tool getTestTool() {
+ return new HashJoinExample();
+ }
+
+ @Override
+ public String[] getTestArgs(String uniqueOutputName) {
+ return new String[] {STREAM_INPUT_PATH, HASH_INPUT_PATH, "2",
HASH_JOIN_OUTPUT_PATH};
+ }
+
+ @Override
+ public String getOutputDir(String uniqueOutputName) {
+ return HASH_JOIN_OUTPUT_PATH;
+ }
+}
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezIntegrationTestBase.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezIntegrationTestBase.java
new file mode 100644
index 00000000..c2a7e8d8
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezIntegrationTestBase.java
@@ -0,0 +1,323 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.LineReader;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.tez.client.TezClientUtils;
+import org.apache.tez.common.RssTezConfig;
+import org.apache.tez.dag.api.TezConfiguration;
+import org.apache.tez.dag.app.RssDAGAppMaster;
+import org.apache.tez.test.MiniTezCluster;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.uniffle.common.ClientType;
+import org.apache.uniffle.coordinator.CoordinatorConf;
+import org.apache.uniffle.server.ShuffleServerConf;
+import org.apache.uniffle.storage.util.StorageType;
+
+import static org.apache.tez.dag.api.TezConfiguration.TEZ_LIB_URIS;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class TezIntegrationTestBase extends IntegrationTestBase {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(TezIntegrationTestBase.class);
+ private static String TEST_ROOT_DIR = "target" + Path.SEPARATOR +
TezWordCountTest.class.getName() + "-tmpDir";
+
+ private Path remoteStagingDir = null;
+ protected static MiniTezCluster miniTezCluster;
+
+ @BeforeAll
+ public static void beforeClass() throws Exception {
+ LOG.info("Starting mini tez clusters");
+ if (miniTezCluster == null) {
+ miniTezCluster = new
MiniTezCluster(TezIntegrationTestBase.class.getName(), 1, 1, 1);
+ miniTezCluster.init(conf);
+ miniTezCluster.start();
+ }
+ LOG.info("Starting corrdinators and shuffer servers");
+ CoordinatorConf coordinatorConf = getCoordinatorConf();
+ Map<String, String> dynamicConf = new HashMap();
+ dynamicConf.put(CoordinatorConf.COORDINATOR_REMOTE_STORAGE_PATH.key(),
HDFS_URI + "rss/test");
+ dynamicConf.put(RssTezConfig.RSS_STORAGE_TYPE,
StorageType.MEMORY_LOCALFILE_HDFS.name());
+ addDynamicConf(coordinatorConf, dynamicConf);
+ createCoordinatorServer(coordinatorConf);
+ ShuffleServerConf shuffleServerConf = getShuffleServerConf();
+ createShuffleServer(shuffleServerConf);
+ startServers();
+ }
+
+ @AfterAll
+ public static void tearDown() throws Exception {
+ if (miniTezCluster != null) {
+ LOG.info("Stopping MiniTezCluster");
+ miniTezCluster.stop();
+ miniTezCluster = null;
+ }
+ }
+
+ @BeforeEach
+ public void setup() throws Exception {
+ remoteStagingDir = fs.makeQualified(new Path(TEST_ROOT_DIR,
String.valueOf(new Random().nextInt(100000))));
+ TezClientUtils.ensureStagingDirExists(conf, remoteStagingDir);
+ }
+
+ @AfterEach
+ public void tearDownEach() throws Exception {
+ if (this.remoteStagingDir != null) {
+ fs.delete(this.remoteStagingDir, true);
+ }
+ }
+
+ public void run() throws Exception {
+ // 1 Run original Tez examples
+ TezConfiguration appConf = new
TezConfiguration(miniTezCluster.getConfig());
+ updateCommonConfiguration(appConf);
+ runTezApp(appConf, getTestTool(), getTestArgs("origin"));
+ final String originPath = getOutputDir("origin");
+
+ // 2 Run Tez examples based on rss
+ appConf = new TezConfiguration(miniTezCluster.getConfig());
+ updateRssConfiguration(appConf);
+ appendAndUploadRssJars(appConf);
+ runTezApp(appConf, getTestTool(), getTestArgs("rss"));
+ final String rssPath = getOutputDir("rss");
+
+ // 3 verify the results
+ verifyResults(originPath, rssPath);
+ }
+
+ public Tool getTestTool() {
+ Assertions.fail("getTestTool is not implemented");
+ return null;
+ }
+
+ public String[] getTestArgs(String uniqueOutputName) {
+ Assertions.fail("getTestArgs is not implemented");
+ return new String[0];
+ }
+
+ public String getOutputDir(String uniqueOutputName) {
+ Assertions.fail("getOutputDir is not implemented");
+ return null;
+ }
+
+ public void verifyResults(String originPath, String rssPath) throws
Exception {
+ verifyResultEqual(originPath, rssPath);
+ }
+
+ public void updateCommonConfiguration(Configuration appConf) throws
Exception {
+ appConf.set(TezConfiguration.TEZ_AM_STAGING_DIR,
remoteStagingDir.toString());
+ appConf.setInt(TezConfiguration.TEZ_AM_RESOURCE_MEMORY_MB, 512);
+ appConf.set(TezConfiguration.TEZ_AM_LAUNCH_CMD_OPTS, " -Xmx384m");
+ appConf.setInt(TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB, 512);
+ appConf.set(TezConfiguration.TEZ_TASK_LAUNCH_CMD_OPTS, " -Xmx384m");
+ }
+
+ public void updateRssConfiguration(Configuration appConf) throws Exception {
+ appConf.set(TezConfiguration.TEZ_AM_STAGING_DIR,
remoteStagingDir.toString());
+ appConf.setInt(TezConfiguration.TEZ_AM_RESOURCE_MEMORY_MB, 512);
+ appConf.set(TezConfiguration.TEZ_AM_LAUNCH_CMD_OPTS, " -Xmx384m");
+ appConf.setInt(TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB, 512);
+ appConf.set(TezConfiguration.TEZ_TASK_LAUNCH_CMD_OPTS, " -Xmx384m");
+ appConf.set(RssTezConfig.RSS_COORDINATOR_QUORUM, COORDINATOR_QUORUM);
+ appConf.set(RssTezConfig.RSS_CLIENT_TYPE, ClientType.GRPC.name());
+ appConf.set(TezConfiguration.TEZ_AM_LAUNCH_CMD_OPTS,
TezConfiguration.TEZ_AM_LAUNCH_CMD_OPTS_DEFAULT + " "
+ + RssDAGAppMaster.class.getName());
+ }
+
+ protected static void appendAndUploadRssJars(TezConfiguration tezConf)
throws IOException {
+ String uris = tezConf.get(TEZ_LIB_URIS);
+ Assertions.assertNotNull(uris);
+
+ // Get the rss client tez shaded jar file.
+ URL url = TezIntegrationTestBase.class.getResource("/");
+ final String parentPath =
+ new
Path(url.getPath()).getParent().getParent().getParent().getParent().toString();
+ File file = new File(parentPath, "client-tez/target/shaded");
+ File[] jars = file.listFiles();
+ File rssJar = null;
+ for (File jar : jars) {
+ if (jar.getName().startsWith("rss-client-tez")) {
+ rssJar = jar;
+ break;
+ }
+ }
+
+ // upload rss jars
+ Path testRootDir =
+ fs.makeQualified(new Path("target",
TezIntegrationTestBase.class.getName() + "-tmpDir"));
+ Path appRemoteJar = new Path(testRootDir, "rss-tez-client-shaded.jar");
+ fs.copyFromLocalFile(new Path(rssJar.toString()), appRemoteJar);
+ fs.setPermission(appRemoteJar, new FsPermission("777"));
+
+ // update tez.lib.uris
+ tezConf.set(TEZ_LIB_URIS, uris + "," + appRemoteJar);
+ }
+
+ protected void runTezApp(TezConfiguration tezConf, Tool tool, String[] args)
throws Exception {
+ assertEquals(0, ToolRunner.run(tezConf, tool, args),
tool.getClass().getName() + " failed");
+ }
+
+ public static void verifyResultEqual(String originPath, String rssPath)
throws Exception {
+ if (originPath == null && rssPath == null) {
+ return;
+ }
+ Path originPathFs = new Path(originPath);
+ Path rssPathFs = new Path(rssPath);
+ FileStatus[] originFiles = fs.listStatus(originPathFs);
+ FileStatus[] rssFiles = fs.listStatus(rssPathFs);
+ long originLen = 0;
+ long rssLen = 0;
+ List<String> originFileList = new ArrayList();
+ List<String> rssFileList = new ArrayList();
+ for (FileStatus file : originFiles) {
+ originLen += file.getLen();
+ String name = file.getPath().getName();
+ if (!name.equals("_SUCCESS")) {
+ originFileList.add(name);
+ }
+ }
+ for (FileStatus file : rssFiles) {
+ rssLen += file.getLen();
+ String name = file.getPath().getName();
+ if (!name.equals("_SUCCESS")) {
+ rssFileList.add(name);
+ }
+ }
+ assertEquals(originFileList.size(), rssFileList.size());
+ for (int i = 0; i < originFileList.size(); i++) {
+ assertEquals(originFileList.get(i), rssFileList.get(i));
+ Path p1 = new Path(originPath, originFileList.get(i));
+ FSDataInputStream f1 = fs.open(p1);
+ Path p2 = new Path(rssPath, rssFileList.get(i));
+ FSDataInputStream f2 = fs.open(p2);
+ boolean isNotEof1 = true;
+ boolean isNotEof2 = true;
+ while (isNotEof1 && isNotEof2) {
+ byte b1 = 1;
+ byte b2 = 1;
+ try {
+ b1 = f1.readByte();
+ } catch (EOFException ee) {
+ isNotEof1 = false;
+ }
+ try {
+ b2 = f2.readByte();
+ } catch (EOFException ee) {
+ isNotEof2 = false;
+ }
+ assertEquals(b1, b2);
+ }
+ assertEquals(isNotEof1, isNotEof2);
+ }
+ assertEquals(originLen, rssLen);
+ }
+
+ public static void verifyResultsSameSet(String originPath, String rssPath)
throws Exception {
+ if (originPath == null && rssPath == null) {
+ return;
+ }
+ // 1 List the originalPath and rssPath, make sure generated file are same.
+ Path originPathFs = new Path(originPath);
+ Path rssPathFs = new Path(rssPath);
+ FileStatus[] originFiles = fs.listStatus(originPathFs);
+ FileStatus[] rssFiles = fs.listStatus(rssPathFs);
+ long originLen = 0;
+ long rssLen = 0;
+ List<String> originFileList = new ArrayList<>();
+ List<String> rssFileList = new ArrayList<>();
+ for (FileStatus file : originFiles) {
+ originLen += file.getLen();
+ String name = file.getPath().getName();
+ if (!name.equals("_SUCCESS")) {
+ originFileList.add(name);
+ }
+ }
+ for (FileStatus file : rssFiles) {
+ rssLen += file.getLen();
+ String name = file.getPath().getName();
+ if (!name.equals("_SUCCESS")) {
+ rssFileList.add(name);
+ }
+ }
+ assertEquals(originFileList.size(), rssFileList.size());
+
+ // 2 Load original result and rss result to hashmap
+ Map<String, Integer> originalResults = new HashMap<>();
+ for (int i = 0; i < originFileList.size(); i++) {
+ Path path = new Path(originPath, originFileList.get(i));
+ LineReader lineReader = new LineReader(fs.open(path));
+ Text line = new Text();
+ while (lineReader.readLine(line) > 0) {
+ if (!originalResults.containsKey(line.toString())) {
+ originalResults.put(line.toString(), 1);
+ } else {
+ originalResults.put(line.toString(),
originalResults.get(line.toString()) + 1);
+ }
+ }
+ }
+
+ Map<String, Integer> rssResults = new HashMap<>();
+ for (int i = 0; i < rssFileList.size(); i++) {
+ Path path = new Path(rssPath, rssFileList.get(i));
+ LineReader lineReader = new LineReader(fs.open(path));
+ Text line = new Text();
+ while (lineReader.readLine(line) > 0) {
+ if (!rssResults.containsKey(line.toString())) {
+ rssResults.put(line.toString(), 1);
+ } else {
+ rssResults.put(line.toString(), rssResults.get(line.toString()) + 1);
+ }
+ }
+ }
+
+ // 3 Compare the hashmap
+ Assertions.assertEquals(originalResults.size(), rssResults.size(),
+ "The size of cartesian product set is not equal");
+ for (Map.Entry<String, Integer> entry : originalResults.entrySet()) {
+ Assertions.assertTrue(rssResults.containsKey(entry.getKey()),
+ entry.getKey() + " is not found in rss cartesian product result");
+ Assertions.assertEquals(entry.getValue(), rssResults.get(entry.getKey()),
+ "the value of " + entry.getKey() + " is not equal to in rss
cartesian product result");
+ }
+ }
+}
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezJoinIntegrationTestBase.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezJoinIntegrationTestBase.java
new file mode 100644
index 00000000..6e9b055f
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezJoinIntegrationTestBase.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.tez.dag.api.TezConfiguration;
+import org.apache.tez.examples.JoinDataGen;
+import org.apache.tez.examples.JoinValidate;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class TezJoinIntegrationTestBase extends TezIntegrationTestBase {
+
+ protected static final String STREAM_INPUT_PATH = "stream_input";
+ protected static final String STREAM_INPUT_FILE_SIZE = "5000000";
+ protected static final String HASH_INPUT_PATH = "hash_input";
+ protected static final String HASH_INPUT_FILE_SIZE = "500000";
+ protected static final String JOIN_EXPECTED_PATH = "join_expected";
+ protected static final String NUM_TASKS = "2";
+
+ protected void generateInputFile() throws Exception {
+ fs.delete(new Path(STREAM_INPUT_PATH), true);
+ fs.delete(new Path(HASH_INPUT_PATH), true);
+ fs.delete(new Path(JOIN_EXPECTED_PATH), true);
+ String[] args = {STREAM_INPUT_PATH, STREAM_INPUT_FILE_SIZE,
HASH_INPUT_PATH, HASH_INPUT_FILE_SIZE,
+ JOIN_EXPECTED_PATH, NUM_TASKS};
+ JoinDataGen dataGen = new JoinDataGen();
+ TezConfiguration appConf = new
TezConfiguration(miniTezCluster.getConfig());
+ updateCommonConfiguration(appConf);
+ assertEquals(0, ToolRunner.run(appConf, dataGen, args), "JoinDataGen
failed");
+ }
+
+ @Override
+ public void verifyResults(String expectedPath, String rssPath) throws
Exception {
+ String[] args = {expectedPath, rssPath};
+ JoinValidate validate = new JoinValidate();
+ TezConfiguration appConf = new
TezConfiguration(miniTezCluster.getConfig());
+ updateCommonConfiguration(appConf);
+ assertEquals(0, ToolRunner.run(appConf, validate, args), "JoinValidate
failed");
+ }
+
+ public void run(String[] overrideArgs) throws Exception {
+ // 1 Run Tez examples based on rss
+ TezConfiguration appConf = new
TezConfiguration(miniTezCluster.getConfig());
+ updateRssConfiguration(appConf);
+ appendAndUploadRssJars(appConf);
+ runTezApp(appConf, getTestTool(), overrideArgs);
+
+ // 2 check the result
+ verifyResults(JOIN_EXPECTED_PATH, getOutputDir(""));
+ }
+}
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezOrderedWordCountTest.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezOrderedWordCountTest.java
new file mode 100644
index 00000000..f661612a
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezOrderedWordCountTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
+
+import com.google.common.collect.Lists;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.Tool;
+import org.apache.tez.examples.OrderedWordCount;
+import org.junit.jupiter.api.Test;
+
+public class TezOrderedWordCountTest extends TezIntegrationTestBase {
+
+ private String inputPath = "ordered_word_count_input";
+ private String outputPath = "ordered_word_count_output";
+ private List<String> wordTable =
+ Lists.newArrayList("apple", "banana", "fruit", "cherry", "Chinese",
"America", "Japan", "tomato");
+
+ @Test
+ public void orderedWordCountTest() throws Exception {
+ generateInputFile();
+ run();
+ }
+
+ private void generateInputFile() throws Exception {
+ // For ordered word count, the key of last ordered sorter is the summation
of word, the value is
+ // the word. So it means this key may not be unique. Because Sorter can
only make sure key is
+ // sorted, so the second column (word column) may be not sorted.
+ // To keep pace with verifyResults, here make sure summation of word is
unique number.
+ FSDataOutputStream outputStream = fs.create(new Path(inputPath));
+ Random random = new Random();
+ Set<Integer> used = new HashSet();
+ List<String> outputList = new ArrayList<>();
+ int index = 0;
+ while (index < wordTable.size()) {
+ int summation = random.nextInt(50);
+ if (used.contains(summation)) {
+ continue;
+ }
+ used.add(summation);
+ for (int i = 0; i < summation; i++) {
+ outputList.add(wordTable.get(index));
+ }
+ index++;
+ }
+ Collections.shuffle(outputList);
+ for (String word : outputList) {
+ outputStream.writeBytes(word + "\n");
+ }
+ outputStream.close();
+ }
+
+ @Override
+ public Tool getTestTool() {
+ return new OrderedWordCount();
+ }
+
+ @Override
+ public String[] getTestArgs(String uniqueOutputName) {
+ return new String[] {inputPath, outputPath + "/" + uniqueOutputName, "2"};
+ }
+
+ @Override
+ public String getOutputDir(String uniqueOutputName) {
+ return outputPath + "/" + uniqueOutputName;
+ }
+}
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezSimpleSessionExampleTest.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezSimpleSessionExampleTest.java
new file mode 100644
index 00000000..ef7e4052
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezSimpleSessionExampleTest.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
+
+import com.google.common.collect.Lists;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.Tool;
+import org.apache.tez.dag.api.TezConfiguration;
+import org.apache.tez.examples.SimpleSessionExample;
+import org.junit.jupiter.api.Test;
+
+import org.apache.uniffle.common.exception.RssException;
+
+public class TezSimpleSessionExampleTest extends TezIntegrationTestBase {
+
+ private String inputPath = "simple_session_input";
+ private String outputPath = "simple_session_output";
+ private List<String> wordTable = Lists.newArrayList("apple", "banana",
"fruit", "cherry",
+ "Chinese", "America", "Japan", "tomato");
+
+ @Test
+ public void simpleSessionExampleTest() throws Exception {
+ generateInputFile();
+ run();
+ }
+
+ @Override
+ public void updateCommonConfiguration(Configuration appConf) throws
Exception {
+ super.updateCommonConfiguration(appConf);
+ appConf.setBoolean(TezConfiguration.TEZ_AM_SESSION_MODE, true);
+ }
+
+ @Override
+ public void updateRssConfiguration(Configuration appConf) throws Exception {
+ super.updateRssConfiguration(appConf);
+ appConf.setBoolean(TezConfiguration.TEZ_AM_SESSION_MODE, true);
+ }
+
+ private void generateInputFile() throws Exception {
+ for (int i = 0; i < 3; i++) {
+ generateInputFile(inputPath + "." + i);
+ }
+ }
+
+ private void generateInputFile(String inputPath) throws Exception {
+    // Generate an input file of randomly shuffled words. Each word from
+    // wordTable is written a unique random number of times (uniqueness is
+    // enforced via the 'used' set), so per-word counts are all distinct and
+    // result verification in verifyResults is unambiguous.
+ FSDataOutputStream outputStream = fs.create(new Path(inputPath));
+ Random random = new Random();
+ Set<Integer> used = new HashSet();
+ List<String> outputList = new ArrayList<>();
+ int index = 0;
+ while (index < wordTable.size()) {
+ int summation = random.nextInt(50);
+ if (used.contains(summation)) {
+ continue;
+ }
+ used.add(summation);
+ for (int i = 0; i < summation; i++) {
+ outputList.add(wordTable.get(index));
+ }
+ index++;
+ }
+ Collections.shuffle(outputList);
+ for (String word : outputList) {
+ outputStream.writeBytes(word + "\n");
+ }
+ outputStream.close();
+ }
+
+ @Override
+ public Tool getTestTool() {
+ return new SimpleSessionExample();
+ }
+
+ @Override
+ public String[] getTestArgs(String uniqueOutputName) {
+ return new String[] {
+ inputPath + ".0" + "," + inputPath + ".1" + "," + inputPath + ".2",
+ outputPath + "/" + uniqueOutputName + ".0" + "," + outputPath + "/" +
uniqueOutputName
+ + ".1" + "," + outputPath + "/" + uniqueOutputName + ".2",
+ "2"
+ };
+ }
+
+ @Override
+ public String getOutputDir(String uniqueOutputName) {
+ return outputPath + "/" + uniqueOutputName + ".0" + "," + outputPath + "/"
+ uniqueOutputName
+ + ".1" + "," + outputPath + "/" + uniqueOutputName + ".2";
+ }
+
+ @Override
+ public void verifyResults(String originPath, String rssPath) throws
Exception {
+ String[] originPaths = originPath.split(",");
+ String[] rssPaths = rssPath.split(",");
+ if (originPaths.length != rssPaths.length) {
+ throw new RssException("The length of paths is mismatched!");
+ }
+ for (int i = 0; i < originPaths.length; i++) {
+ verifyResultEqual(originPaths[i], rssPaths[i]);
+ }
+ }
+}
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezSortMergeJoinTest.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezSortMergeJoinTest.java
new file mode 100644
index 00000000..b31bfa8f
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezSortMergeJoinTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import org.apache.hadoop.util.Tool;
+import org.apache.tez.examples.SortMergeJoinExample;
+import org.junit.jupiter.api.Test;
+
+public class TezSortMergeJoinTest extends TezJoinIntegrationTestBase {
+
+ private static final String SORT_MERGE_JOIN_OUTPUT_PATH =
"sort_merge_join_output";
+
+ @Test
+ public void sortMergeJoinTest() throws Exception {
+ generateInputFile();
+ run(getTestArgs(""));
+ }
+
+ @Override
+ public Tool getTestTool() {
+ return new SortMergeJoinExample();
+ }
+
+ @Override
+ public String[] getTestArgs(String uniqueOutputName) {
+ return new String[] {STREAM_INPUT_PATH, HASH_INPUT_PATH, "2",
SORT_MERGE_JOIN_OUTPUT_PATH};
+ }
+
+ @Override
+ public String getOutputDir(String uniqueOutputName) {
+ return SORT_MERGE_JOIN_OUTPUT_PATH;
+ }
+}
diff --git
a/integration-test/tez/src/test/java/org/apache/uniffle/test/TezWordCountTest.java
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezWordCountTest.java
new file mode 100644
index 00000000..b590b6f3
--- /dev/null
+++
b/integration-test/tez/src/test/java/org/apache/uniffle/test/TezWordCountTest.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.test;
+
+import java.util.List;
+import java.util.Random;
+
+import com.google.common.collect.Lists;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.Tool;
+import org.apache.tez.examples.WordCount;
+import org.junit.jupiter.api.Test;
+
+public class TezWordCountTest extends TezIntegrationTestBase {
+
+ private String inputPath = "word_count_input";
+ private String outputPath = "word_count_output";
+ private List<String> wordTable =
+ Lists.newArrayList("apple", "banana", "fruit", "cherry", "Chinese",
"America", "Japan", "tomato");
+
+ @Test
+ public void wordCountTest() throws Exception {
+ generateInputFile();
+ run();
+ }
+
+ private void generateInputFile() throws Exception {
+ FSDataOutputStream outputStream = fs.create(new Path(inputPath));
+ Random random = new Random();
+ for (int i = 0; i < 100; i++) {
+ int index = random.nextInt(wordTable.size());
+ String str = wordTable.get(index) + "\n";
+ outputStream.writeBytes(str);
+ }
+ outputStream.close();
+ }
+
+ @Override
+ public Tool getTestTool() {
+ return new WordCount();
+ }
+
+ @Override
+ public String[] getTestArgs(String uniqueOutputName) {
+ return new String[] {inputPath, outputPath + "/" + uniqueOutputName, "2"};
+ }
+
+ @Override
+ public String getOutputDir(String uniqueOutputName) {
+ return outputPath + "/" + uniqueOutputName;
+ }
+}
diff --git a/pom.xml b/pom.xml
index ac908b60..7df5ff30 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1684,6 +1684,7 @@
<id>tez</id>
<modules>
<module>client-tez</module>
+ <module>integration-test/tez</module>
</modules>
<properties>
<tez.version>0.9.1</tez.version>
@@ -1694,33 +1695,135 @@
<groupId>org.apache.tez</groupId>
<artifactId>tez-common</artifactId>
<version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-runtime-library</artifactId>
<version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-runtime-internals</artifactId>
<version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-dag</artifactId>
<version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-api</artifactId>
<version>${tez.version}</version>
<exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
</exclusions>
</dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-tests</artifactId>
+ <type>test-jar</type>
+ <scope>test</scope>
+ <version>${tez.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.uniffle</groupId>
+ <artifactId>rss-integration-common-test</artifactId>
+ <version>${project.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
@@ -1736,6 +1839,16 @@
</exclusion>
</exclusions>
</dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
@@ -1775,25 +1888,21 @@
<hadoop.short.version>3.2</hadoop.short.version>
<commons-codec.version>1.11</commons-codec.version>
<metrics.version>3.2.4</metrics.version>
- <bouncycastle.version>1.60</bouncycastle.version>
- <junit4.version>4.11</junit4.version>
</properties>
- <dependencyManagement>
- <dependencies>
- <dependency>
- <groupId>org.bouncycastle</groupId>
- <artifactId>bcprov-jdk15on</artifactId>
- <version>${bouncycastle.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>${junit4.version}</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
- </dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk15on</artifactId>
+ <version>1.60</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.11</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
</profile>
</profiles>
</project>
diff --git a/server/pom.xml b/server/pom.xml
index 78586b71..89aa7c45 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -219,16 +219,4 @@
</plugin>
</plugins>
</build>
- <profiles>
- <profile>
- <id>hadoop3.2</id>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
- </profile>
- </profiles>
</project>
diff --git a/storage/pom.xml b/storage/pom.xml
index 4caa6d86..abf0cffe 100644
--- a/storage/pom.xml
+++ b/storage/pom.xml
@@ -111,16 +111,4 @@
</plugin>
</plugins>
</build>
- <profiles>
- <profile>
- <id>hadoop3.2</id>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
- </profile>
- </profiles>
</project>