This is an automated email from the ASF dual-hosted git repository.

roryqi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-uniffle.git


The following commit(s) were added to refs/heads/master by this push:
     new b08efa3c [#896] Improvement: Support Hadoop-3.2 (#897)
b08efa3c is described below

commit b08efa3cfe0ef55a97f4c59f0dc1ebfd0b8441ac
Author: zhengchenyu <[email protected]>
AuthorDate: Mon May 29 23:36:04 2023 +0800

    [#896] Improvement: Support Hadoop-3.2 (#897)
    
    ### What changes were proposed in this pull request?
    
    This PR aims to support Hadoop-3.2 by adding the new profiles hadoop3.2 and hadoop2.8.
    
    ### Why are the changes needed?
    
    The changes in this PR are described below:
    * Add a Hadoop shim so that we can build against different Hadoop versions.
    * Rename org.apache.commons.lang.* to org.apache.commons.lang3.*
      In this project, commons-lang3 was chosen. Some code used commons-lang2,
which is a transitive dependency of hadoop2. When building with hadoop3, the
project could not find commons-lang2.
    * Changes related to RequireUpperBoundDeps:
    upgrade commons-codec.version to 1.11 and upgrade metrics.version to 3.2.3
in the hadoop3.2 profile; exclude jersey-json.
    
    ### Does this PR introduce _any_ user-facing change?
    
    When users build the project, they should add the option -Phadoop2.8 or -Phadoop3.2.
    
    ### How was this patch tested?
    
    1. Unit test
    2. Build test
---
 .github/workflows/parallel.yml                     |  8 ++-
 .github/workflows/sequential.yml                   |  9 ++-
 build_distribution.sh                              | 14 +++--
 client-mr/{ => core}/pom.xml                       | 12 +++-
 .../hadoop/mapred/RssMapOutputCollector.java       |  0
 .../org/apache/hadoop/mapred/SortWriteBuffer.java  |  0
 .../hadoop/mapred/SortWriteBufferManager.java      |  0
 .../org/apache/hadoop/mapreduce/MRIdHelper.java    |  0
 .../org/apache/hadoop/mapreduce/RssMRConfig.java   |  0
 .../org/apache/hadoop/mapreduce/RssMRUtils.java    |  0
 .../mapreduce/task/reduce/RssBypassWriter.java     |  0
 .../mapreduce/task/reduce/RssEventFetcher.java     |  0
 .../hadoop/mapreduce/task/reduce/RssFetcher.java   |  0
 .../task/reduce/RssInMemoryRemoteMerger.java       |  0
 .../task/reduce/RssRemoteMergeManagerImpl.java     |  0
 .../hadoop/mapreduce/task/reduce/RssShuffle.java   |  3 +-
 .../hadoop/mapreduce/v2/app/RssMRAppMaster.java    | 15 +----
 .../hadoop/mapred/SortWriteBufferManagerTest.java  |  0
 .../apache/hadoop/mapred/SortWriteBufferTest.java  |  0
 .../apache/hadoop/mapreduce/RssMRUtilsTest.java    |  0
 .../mapreduce/task/reduce/EventFetcherTest.java    |  0
 .../hadoop/mapreduce/task/reduce/FetcherTest.java  |  3 +-
 .../task/reduce/RssInMemoryRemoteMergerTest.java   |  0
 .../task/reduce/RssRemoteMergeManagerTest.java     |  0
 {client => client-mr/hadoop2.8}/pom.xml            | 46 ++++++---------
 .../apache/uniffle/hadoop/shim/HadoopShimImpl.java | 62 +++++++++++++++++++
 {client => client-mr/hadoop3.2}/pom.xml            | 50 +++++++---------
 .../apache/uniffle/hadoop/shim/HadoopShimImpl.java | 50 ++++++++++++++++
 .../shuffle/reader/RssShuffleDataIteratorTest.java |  2 +-
 .../shuffle/writer/WriteBufferManagerTest.java     |  2 +-
 client-tez/pom.xml                                 |  6 ++
 .../java/org/apache/tez/common/RssTezUtils.java    |  6 +-
 client/pom.xml                                     | 10 ++++
 common/pom.xml                                     | 39 ++++++++++++
 coordinator/pom.xml                                |  6 ++
 integration-test/common/pom.xml                    | 18 ++++++
 integration-test/mr/pom.xml                        |  6 ++
 .../apache/uniffle/test/MRIntegrationTestBase.java |  3 +-
 pom.xml                                            | 69 +++++++++++++++++++++-
 server/pom.xml                                     | 22 +++++++
 .../server/storage/LocalStorageManagerTest.java    |  2 +-
 storage/pom.xml                                    | 18 ++++++
 .../common/DefaultStorageMediaProvider.java        |  4 +-
 .../impl/PooledHadoopShuffleWriteHandler.java      |  2 +-
 .../uniffle/storage/util/ShuffleStorageUtils.java  |  2 +-
 45 files changed, 396 insertions(+), 93 deletions(-)

diff --git a/.github/workflows/parallel.yml b/.github/workflows/parallel.yml
index aa270e0e..6638a9a1 100644
--- a/.github/workflows/parallel.yml
+++ b/.github/workflows/parallel.yml
@@ -57,7 +57,8 @@ jobs:
           - spark3.2
           - spark3.2.0
           - spark3.3
-          - mr
+          - mr:hadoop2.8
+          - mr:hadoop3.2
           - tez
       fail-fast: false
     name: -P${{ matrix.profile }}
@@ -80,7 +81,10 @@ jobs:
           mvn-${{ inputs.java-version }}-package-${{ matrix.profile }}-
           mvn-${{ inputs.java-version }}-package-
     - name: Execute `mvn ${{ inputs.maven-args }} -P${{ matrix.profile }}`
-      run: mvn -B -fae ${{ inputs.maven-args }} -P${{ matrix.profile }} | tee 
/tmp/maven.log
+      run: |
+        PROFILES="${{ matrix.profile }}"
+        PROFILES=${PROFILES/:/,}
+        mvn -B -fae ${{ inputs.maven-args }} -P${PROFILES} | tee /tmp/maven.log
       shell: bash
     - name: Summary of failures
       if: ${{ failure() && inputs.summary != '' }}
diff --git a/.github/workflows/sequential.yml b/.github/workflows/sequential.yml
index 0f59604b..1ecddeb3 100644
--- a/.github/workflows/sequential.yml
+++ b/.github/workflows/sequential.yml
@@ -74,11 +74,14 @@ jobs:
     - name: Execute `mvn ${{ inputs.maven-args }} -Pspark2`
       run: mvn -B -fae ${{ inputs.maven-args }} -Pspark2 | tee -a 
/tmp/maven.log;
       shell: bash
-    - name: Execute `mvn ${{ inputs.maven-args }} -Pmr`
-      run: mvn -B -fae ${{ inputs.maven-args }} -Pmr | tee -a /tmp/maven.log;
+    - name: Execute `mvn ${{ inputs.maven-args }} -Pmr,hadoop2.8`
+      run: mvn -B -fae ${{ inputs.maven-args }} -Pmr,hadoop2.8 | tee -a 
/tmp/maven.log;
+      shell: bash
+    - name: Execute `mvn ${{ inputs.maven-args }} -Pmr,hadoop3.2`
+      run: mvn -B -fae ${{ inputs.maven-args }} -Pmr,hadoop3.2 | tee -a 
/tmp/maven.log;
       shell: bash
     - name: Execute `mvn ${{ inputs.maven-args }} -Ptez`
-      run: mvn -B -fae ${{ inputs.maven-args }} -Pmr | tee -a /tmp/maven.log;
+      run: mvn -B -fae ${{ inputs.maven-args }} -Ptez | tee -a /tmp/maven.log;
       shell: bash
     - name: Summary of failures
       if: ${{ failure() && inputs.summary != '' }}
diff --git a/build_distribution.sh b/build_distribution.sh
index 8c061e8d..f36080e1 100755
--- a/build_distribution.sh
+++ b/build_distribution.sh
@@ -37,6 +37,7 @@ function exit_with_usage() {
   echo 
"+------------------------------------------------------------------------------------------------------+"
   echo "| ./build_distribution.sh [--spark2-profile <spark2 profile id>] 
[--spark2-mvn <custom maven options>] |"
   echo "|                         [--spark3-profile <spark3 profile id>] 
[--spark3-mvn <custom maven options>] |"
+  echo "|                         [--hadoop-profile <hadoop profile id>]       
                                |"
   echo "|                         <maven build options>                        
                                |"
   echo 
"+------------------------------------------------------------------------------------------------------+"
   exit 1
@@ -46,6 +47,7 @@ SPARK2_PROFILE_ID="spark2"
 SPARK2_MVN_OPTS=""
 SPARK3_PROFILE_ID="spark3"
 SPARK3_MVN_OPTS=""
+HADOOP_PROFILE_ID="hadoop2.8"
 while (( "$#" )); do
   case $1 in
     --spark2-profile)
@@ -64,6 +66,10 @@ while (( "$#" )); do
       SPARK3_MVN_OPTS=$2
       shift
       ;;
+    --hadoop-profile)
+      HADOOP_PROFILE_ID=$2
+      shift
+      ;;
     --help)
       exit_with_usage
       ;;
@@ -103,7 +109,7 @@ VERSION=$("$MVN" help:evaluate -Dexpression=project.version 
$@ 2>/dev/null |
   tail -n 1)
 
 # Dependencies version
-HADOOP_VERSION=$("$MVN" help:evaluate -Dexpression=hadoop.version $@ 
2>/dev/null\
+HADOOP_VERSION=$("$MVN" help:evaluate -Dexpression=hadoop.version 
-P$HADOOP_PROFILE_ID $@ 2>/dev/null\
     | grep -v "INFO"\
     | grep -v "WARNING"\
     | tail -n 1)
@@ -137,7 +143,7 @@ DISTDIR="rss-$VERSION"
 rm -rf "$DISTDIR"
 mkdir -p "${DISTDIR}/jars"
 echo "RSS ${VERSION}${GITREVSTRING} built for Hadoop ${HADOOP_VERSION} Spark2 
${SPARK2_VERSION} Spark3 ${SPARK3_VERSION}" >"${DISTDIR}/RELEASE"
-echo "Build flags: --spark2-profile '$SPARK2_PROFILE_ID' --spark2-mvn 
'$SPARK2_MVN_OPTS' --spark3-profile '$SPARK3_PROFILE_ID' --spark3-mvn 
'$SPARK3_MVN_OPTS' $@" >>"$DISTDIR/RELEASE"
+echo "Build flags: --spark2-profile '$SPARK2_PROFILE_ID' --spark2-mvn 
'$SPARK2_MVN_OPTS' --spark3-profile '$SPARK3_PROFILE_ID' --spark3-mvn 
'$SPARK3_MVN_OPTS' --hadoop-profile '$HADOOP_PROFILE_ID' $@" 
>>"$DISTDIR/RELEASE"
 mkdir -p "${DISTDIR}/logs"
 
 SERVER_JAR_DIR="${DISTDIR}/jars/server"
@@ -191,13 +197,13 @@ 
SPARK_CLIENT3_JAR="${RSS_HOME}/client-spark/spark3/target/shaded/rss-client-spar
 echo "copy $SPARK_CLIENT3_JAR to ${SPARK_CLIENT3_JAR_DIR}"
 cp $SPARK_CLIENT3_JAR $SPARK_CLIENT3_JAR_DIR
 
-BUILD_COMMAND_MR=("$MVN" clean package -Pmr -pl client-mr -DskipTests -am $@)
+BUILD_COMMAND_MR=("$MVN" clean package -Pmr,$HADOOP_PROFILE_ID -pl 
client-mr/core -DskipTests -am $@)
 echo -e "\nBuilding with..."
 echo -e "\$ ${BUILD_COMMAND_MR[@]}\n"
 "${BUILD_COMMAND_MR[@]}"
 MR_CLIENT_JAR_DIR="${CLIENT_JAR_DIR}/mr"
 mkdir -p $MR_CLIENT_JAR_DIR
-MR_CLIENT_JAR="${RSS_HOME}/client-mr/target/shaded/rss-client-mr-${VERSION}-shaded.jar"
+MR_CLIENT_JAR="${RSS_HOME}/client-mr/core/target/shaded/rss-client-mr-${VERSION}-shaded.jar"
 echo "copy $MR_CLIENT_JAR to ${MR_CLIENT_JAR_DIR}"
 cp $MR_CLIENT_JAR $MR_CLIENT_JAR_DIR
 
diff --git a/client-mr/pom.xml b/client-mr/core/pom.xml
similarity index 95%
rename from client-mr/pom.xml
rename to client-mr/core/pom.xml
index 5648970d..eaa42c75 100644
--- a/client-mr/pom.xml
+++ b/client-mr/core/pom.xml
@@ -23,7 +23,7 @@
         <artifactId>uniffle-parent</artifactId>
         <groupId>org.apache.uniffle</groupId>
         <version>0.8.0-SNAPSHOT</version>
-        <relativePath>../pom.xml</relativePath>
+        <relativePath>../../pom.xml</relativePath>
     </parent>
 
     <groupId>org.apache.uniffle</groupId>
@@ -63,6 +63,12 @@
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-common</artifactId>
             <version>${hadoop.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-json</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
@@ -105,6 +111,10 @@
             <groupId>com.github.luben</groupId>
             <artifactId>zstd-jni</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.uniffle</groupId>
+            <artifactId>hadoop${hadoop.short.version}-shim</artifactId>
+        </dependency>
     </dependencies>
 
     <build>
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapred/RssMapOutputCollector.java 
b/client-mr/core/src/main/java/org/apache/hadoop/mapred/RssMapOutputCollector.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapred/RssMapOutputCollector.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapred/RssMapOutputCollector.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapred/SortWriteBuffer.java 
b/client-mr/core/src/main/java/org/apache/hadoop/mapred/SortWriteBuffer.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapred/SortWriteBuffer.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapred/SortWriteBuffer.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapred/SortWriteBufferManager.java 
b/client-mr/core/src/main/java/org/apache/hadoop/mapred/SortWriteBufferManager.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapred/SortWriteBufferManager.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapred/SortWriteBufferManager.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/MRIdHelper.java 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/MRIdHelper.java
similarity index 100%
rename from client-mr/src/main/java/org/apache/hadoop/mapreduce/MRIdHelper.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/MRIdHelper.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/RssMRConfig.java 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/RssMRConfig.java
similarity index 100%
rename from client-mr/src/main/java/org/apache/hadoop/mapreduce/RssMRConfig.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/RssMRConfig.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/RssMRUtils.java 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/RssMRUtils.java
similarity index 100%
rename from client-mr/src/main/java/org/apache/hadoop/mapreduce/RssMRUtils.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/RssMRUtils.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssBypassWriter.java
 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssBypassWriter.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssBypassWriter.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssBypassWriter.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssEventFetcher.java
 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssEventFetcher.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssEventFetcher.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssEventFetcher.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssFetcher.java
 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssFetcher.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssFetcher.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssFetcher.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMerger.java
 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMerger.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMerger.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMerger.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerImpl.java
 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerImpl.java
similarity index 100%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerImpl.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerImpl.java
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssShuffle.java
 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssShuffle.java
similarity index 98%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssShuffle.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssShuffle.java
index 0094ecd5..ea21876b 100644
--- 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssShuffle.java
+++ 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/RssShuffle.java
@@ -44,6 +44,7 @@ import org.apache.uniffle.client.factory.ShuffleClientFactory;
 import org.apache.uniffle.client.request.CreateShuffleReadClientRequest;
 import org.apache.uniffle.common.RemoteStorageInfo;
 import org.apache.uniffle.common.ShuffleServerInfo;
+import org.apache.uniffle.hadoop.shim.HadoopShimImpl;
 
 public class RssShuffle<K, V> implements ShuffleConsumerPlugin<K, V>, 
ExceptionReporter {
 
@@ -90,7 +91,7 @@ public class RssShuffle<K, V> implements 
ShuffleConsumerPlugin<K, V>, ExceptionR
 
     this.umbilical = context.getUmbilical();
     this.reporter = context.getReporter();
-    this.metrics = new ShuffleClientMetrics(reduceId, mrJobConf);
+    this.metrics = HadoopShimImpl.createShuffleClientMetrics(reduceId, 
mrJobConf);
     this.copyPhase = context.getCopyPhase();
     this.taskStatus = context.getStatus();
     this.reduceTask = context.getReduceTask();
diff --git 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/v2/app/RssMRAppMaster.java
 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/v2/app/RssMRAppMaster.java
similarity index 96%
rename from 
client-mr/src/main/java/org/apache/hadoop/mapreduce/v2/app/RssMRAppMaster.java
rename to 
client-mr/core/src/main/java/org/apache/hadoop/mapreduce/v2/app/RssMRAppMaster.java
index ee5962f7..c79dda62 100644
--- 
a/client-mr/src/main/java/org/apache/hadoop/mapreduce/v2/app/RssMRAppMaster.java
+++ 
b/client-mr/core/src/main/java/org/apache/hadoop/mapreduce/v2/app/RssMRAppMaster.java
@@ -51,7 +51,6 @@ import 
org.apache.hadoop.mapreduce.v2.app.local.LocalContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
@@ -62,11 +61,9 @@ import org.apache.hadoop.util.ExitUtil;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.util.SystemClock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -81,6 +78,7 @@ import org.apache.uniffle.common.ShuffleServerInfo;
 import org.apache.uniffle.common.exception.RssException;
 import org.apache.uniffle.common.util.Constants;
 import org.apache.uniffle.common.util.RetryUtils;
+import org.apache.uniffle.hadoop.shim.HadoopShimImpl;
 import org.apache.uniffle.storage.util.StorageType;
 
 import static 
org.apache.uniffle.common.config.RssClientConf.MAX_CONCURRENCY_PER_PARTITION_TO_WRITE;
@@ -405,16 +403,7 @@ public class RssMRAppMaster extends MRAppMaster {
             RssMRAppMaster.this.rssNmHttpPort,
             RssMRAppMaster.this.rssContainerID);
       } else {
-        this.containerAllocator = new RMContainerAllocator(this.clientService, 
this.context) {
-          @Override
-          protected AllocateResponse makeRemoteRequest() throws YarnException, 
IOException {
-            AllocateResponse response = super.makeRemoteRequest();
-            // UpdateNodes only have one use for MRAppMaster, MRAppMaster use 
the updateNodes to find which
-            // nodes are bad nodes. So we clear them, MRAppMaster will not 
recompute the map tasks.
-            response.getUpdatedNodes().clear();
-            return response;
-          }
-        };
+        this.containerAllocator = 
HadoopShimImpl.createRMContainerAllocator(this.clientService, this.context);
       }
 
       ((Service)this.containerAllocator).init(this.getConfig());
diff --git 
a/client-mr/src/test/java/org/apache/hadoop/mapred/SortWriteBufferManagerTest.java
 
b/client-mr/core/src/test/java/org/apache/hadoop/mapred/SortWriteBufferManagerTest.java
similarity index 100%
rename from 
client-mr/src/test/java/org/apache/hadoop/mapred/SortWriteBufferManagerTest.java
rename to 
client-mr/core/src/test/java/org/apache/hadoop/mapred/SortWriteBufferManagerTest.java
diff --git 
a/client-mr/src/test/java/org/apache/hadoop/mapred/SortWriteBufferTest.java 
b/client-mr/core/src/test/java/org/apache/hadoop/mapred/SortWriteBufferTest.java
similarity index 100%
rename from 
client-mr/src/test/java/org/apache/hadoop/mapred/SortWriteBufferTest.java
rename to 
client-mr/core/src/test/java/org/apache/hadoop/mapred/SortWriteBufferTest.java
diff --git 
a/client-mr/src/test/java/org/apache/hadoop/mapreduce/RssMRUtilsTest.java 
b/client-mr/core/src/test/java/org/apache/hadoop/mapreduce/RssMRUtilsTest.java
similarity index 100%
rename from 
client-mr/src/test/java/org/apache/hadoop/mapreduce/RssMRUtilsTest.java
rename to 
client-mr/core/src/test/java/org/apache/hadoop/mapreduce/RssMRUtilsTest.java
diff --git 
a/client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcherTest.java
 
b/client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcherTest.java
similarity index 100%
rename from 
client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcherTest.java
rename to 
client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/EventFetcherTest.java
diff --git 
a/client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/FetcherTest.java
 
b/client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/FetcherTest.java
similarity index 99%
rename from 
client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/FetcherTest.java
rename to 
client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/FetcherTest.java
index 6f1da6d3..7fd68730 100644
--- 
a/client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/FetcherTest.java
+++ 
b/client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/FetcherTest.java
@@ -74,6 +74,7 @@ import org.apache.uniffle.common.compression.Lz4Codec;
 import org.apache.uniffle.common.config.RssConf;
 import org.apache.uniffle.common.exception.RssException;
 import org.apache.uniffle.common.util.JavaUtils;
+import org.apache.uniffle.hadoop.shim.HadoopShimImpl;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
@@ -86,7 +87,7 @@ public class FetcherTest {
   static LocalDirAllocator lda = new LocalDirAllocator(MRConfig.LOCAL_DIR);
 
   static TaskStatus taskStatus = new MockedTaskStatus();
-  static ShuffleClientMetrics metrics = new ShuffleClientMetrics(reduceId1, 
jobConf);
+  static ShuffleClientMetrics metrics = 
HadoopShimImpl.createShuffleClientMetrics(reduceId1, jobConf);
   static Reporter reporter = new MockedReporter();
   static FileSystem fs;
   static List<byte[]> data;
diff --git 
a/client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMergerTest.java
 
b/client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMergerTest.java
similarity index 100%
rename from 
client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMergerTest.java
rename to 
client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssInMemoryRemoteMergerTest.java
diff --git 
a/client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerTest.java
 
b/client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerTest.java
similarity index 100%
rename from 
client-mr/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerTest.java
rename to 
client-mr/core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/RssRemoteMergeManagerTest.java
diff --git a/client/pom.xml b/client-mr/hadoop2.8/pom.xml
similarity index 60%
copy from client/pom.xml
copy to client-mr/hadoop2.8/pom.xml
index 527bad78..24386687 100644
--- a/client/pom.xml
+++ b/client-mr/hadoop2.8/pom.xml
@@ -17,48 +17,38 @@
   -->
 
 <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-    xmlns="http://maven.apache.org/POM/4.0.0";
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+         xmlns="http://maven.apache.org/POM/4.0.0";
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.uniffle</groupId>
     <artifactId>uniffle-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
   </parent>
-
-  <groupId>org.apache.uniffle</groupId>
-  <artifactId>rss-client</artifactId>
-  <version>0.8.0-SNAPSHOT</version>
-  <packaging>jar</packaging>
-  <name>Apache Uniffle Client</name>
-
+  <artifactId>hadoop2.8-shim</artifactId>
+  <name>Apache Uniffle Hadoop2.8 Shims</name>
   <dependencies>
     <dependency>
-      <groupId>org.apache.uniffle</groupId>
-      <artifactId>shuffle-storage</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.uniffle</groupId>
-      <artifactId>shuffle-storage</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-inline</artifactId>
-      <scope>test</scope>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <version>${hadoop.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git 
a/client-mr/hadoop2.8/src/main/java/org/apache/uniffle/hadoop/shim/HadoopShimImpl.java
 
b/client-mr/hadoop2.8/src/main/java/org/apache/uniffle/hadoop/shim/HadoopShimImpl.java
new file mode 100644
index 00000000..0abb1d9e
--- /dev/null
+++ 
b/client-mr/hadoop2.8/src/main/java/org/apache/uniffle/hadoop/shim/HadoopShimImpl.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.hadoop.shim;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.task.reduce.ShuffleClientMetrics;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
+import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
+import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+
+public class HadoopShimImpl {
+
+  private static final Log LOG = LogFactory.getLog(HadoopShimImpl.class);
+
+  public static ShuffleClientMetrics createShuffleClientMetrics(TaskAttemptID 
taskAttemptID, JobConf jobConf) {
+    try {
+      Constructor constructor =
+          ShuffleClientMetrics.class.getDeclaredConstructor(new Class[] 
{TaskAttemptID.class, JobConf.class});
+      constructor.setAccessible(true);
+      return (ShuffleClientMetrics) constructor.newInstance(taskAttemptID, 
jobConf);
+    } catch (Exception e) {
+      LOG.warn("Failed to construct ShuffleClientMetrics", e);
+      return null;
+    }
+  }
+
+  public static RMContainerAllocator createRMContainerAllocator(ClientService 
clientService, AppContext context) {
+    return new RMContainerAllocator(clientService, context) {
+      @Override
+      protected AllocateResponse makeRemoteRequest() throws YarnException, 
IOException {
+        AllocateResponse response = super.makeRemoteRequest();
+        // UpdateNodes only have one use for MRAppMaster, MRAppMaster use the 
updateNodes to find which
+        // nodes are bad nodes. So we clear them, MRAppMaster will not 
recompute the map tasks.
+        response.getUpdatedNodes().clear();
+        return response;
+      }
+    };
+  }
+}
diff --git a/client/pom.xml b/client-mr/hadoop3.2/pom.xml
similarity index 60%
copy from client/pom.xml
copy to client-mr/hadoop3.2/pom.xml
index 527bad78..56d058f3 100644
--- a/client/pom.xml
+++ b/client-mr/hadoop3.2/pom.xml
@@ -17,48 +17,42 @@
   -->
 
 <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-    xmlns="http://maven.apache.org/POM/4.0.0";
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+         xmlns="http://maven.apache.org/POM/4.0.0";
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.uniffle</groupId>
     <artifactId>uniffle-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
   </parent>
-
-  <groupId>org.apache.uniffle</groupId>
-  <artifactId>rss-client</artifactId>
-  <version>0.8.0-SNAPSHOT</version>
-  <packaging>jar</packaging>
-  <name>Apache Uniffle Client</name>
-
+  <artifactId>hadoop3.2-shim</artifactId>
+  <name>Apache Uniffle Hadoop3.2 Shims</name>
   <dependencies>
     <dependency>
-      <groupId>org.apache.uniffle</groupId>
-      <artifactId>shuffle-storage</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.uniffle</groupId>
-      <artifactId>shuffle-storage</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-inline</artifactId>
-      <scope>test</scope>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.google.protobuf</groupId>
+          <artifactId>protobuf-java</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <version>${hadoop.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git 
a/client-mr/hadoop3.2/src/main/java/org/apache/uniffle/hadoop/shim/HadoopShimImpl.java
 
b/client-mr/hadoop3.2/src/main/java/org/apache/uniffle/hadoop/shim/HadoopShimImpl.java
new file mode 100644
index 00000000..e24cc2e5
--- /dev/null
+++ 
b/client-mr/hadoop3.2/src/main/java/org/apache/uniffle/hadoop/shim/HadoopShimImpl.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.uniffle.hadoop.shim;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.task.reduce.ShuffleClientMetrics;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
+import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
+import org.apache.hadoop.mapreduce.v2.app.rm.preemption.NoopAMPreemptionPolicy;
+import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+
+public class HadoopShimImpl {
+
+  public static ShuffleClientMetrics createShuffleClientMetrics(TaskAttemptID 
taskAttemptID, JobConf jobConf) {
+    return ShuffleClientMetrics.create(taskAttemptID, jobConf);
+  }
+
+  public static RMContainerAllocator createRMContainerAllocator(ClientService 
clientService, AppContext context) {
+    return new RMContainerAllocator(clientService, context, new 
NoopAMPreemptionPolicy()) {
+      @Override
+      protected AllocateResponse makeRemoteRequest() throws YarnException, 
IOException {
+        AllocateResponse response = super.makeRemoteRequest();
+        // UpdateNodes only have one use for MRAppMaster, MRAppMaster use the 
updateNodes to find which
+        // nodes are bad nodes. So we clear them, MRAppMaster will not 
recompute the map tasks.
+        response.getUpdatedNodes().clear();
+        return response;
+      }
+    };
+  }
+}
diff --git 
a/client-spark/common/src/test/java/org/apache/spark/shuffle/reader/RssShuffleDataIteratorTest.java
 
b/client-spark/common/src/test/java/org/apache/spark/shuffle/reader/RssShuffleDataIteratorTest.java
index ce92f349..31c5b906 100644
--- 
a/client-spark/common/src/test/java/org/apache/spark/shuffle/reader/RssShuffleDataIteratorTest.java
+++ 
b/client-spark/common/src/test/java/org/apache/spark/shuffle/reader/RssShuffleDataIteratorTest.java
@@ -23,7 +23,7 @@ import java.util.Map;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.commons.lang.reflect.FieldUtils;
+import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
diff --git 
a/client-spark/common/src/test/java/org/apache/spark/shuffle/writer/WriteBufferManagerTest.java
 
b/client-spark/common/src/test/java/org/apache/spark/shuffle/writer/WriteBufferManagerTest.java
index 5e5e6dfb..44cd9124 100644
--- 
a/client-spark/common/src/test/java/org/apache/spark/shuffle/writer/WriteBufferManagerTest.java
+++ 
b/client-spark/common/src/test/java/org/apache/spark/shuffle/writer/WriteBufferManagerTest.java
@@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
 
 import com.google.common.collect.Maps;
-import org.apache.commons.lang.reflect.FieldUtils;
+import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.executor.ShuffleWriteMetrics;
 import org.apache.spark.memory.TaskMemoryManager;
diff --git a/client-tez/pom.xml b/client-tez/pom.xml
index b3a19054..4188c1f6 100644
--- a/client-tez/pom.xml
+++ b/client-tez/pom.xml
@@ -82,6 +82,12 @@
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-common</artifactId>
+        <exclusions>
+          <exclusion>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-json</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
diff --git a/client-tez/src/main/java/org/apache/tez/common/RssTezUtils.java 
b/client-tez/src/main/java/org/apache/tez/common/RssTezUtils.java
index 694147da..69837f73 100644
--- a/client-tez/src/main/java/org/apache/tez/common/RssTezUtils.java
+++ b/client-tez/src/main/java/org/apache/tez/common/RssTezUtils.java
@@ -124,7 +124,7 @@ public class RssTezUtils {
         res.add(tmp);
       }
     }
-    return org.apache.commons.lang.StringUtils.join(res, COMMA_DELIMITER);
+    return StringUtils.join(res, COMMA_DELIMITER);
   }
 
   public static Map<String, List<String>> uniformServerToPartitions(String 
partitionToServers) {
@@ -151,11 +151,11 @@ public class RssTezUtils {
     List<String> res = new ArrayList<>();
     Set<String> keySet = map.keySet();
     for (String s : keySet) {
-      String join = org.apache.commons.lang.StringUtils.join(map.get(s), 
UNDERLINE_DELIMITER);
+      String join = StringUtils.join(map.get(s), UNDERLINE_DELIMITER);
       res.add(s + PLUS_DELIMITER + join);
     }
 
-    return org.apache.commons.lang.StringUtils.join(res,COMMA_DELIMITER);
+    return StringUtils.join(res,COMMA_DELIMITER);
   }
 
   public static ApplicationAttemptId getApplicationAttemptId() {
diff --git a/client/pom.xml b/client/pom.xml
index 527bad78..cc3bceb2 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -55,6 +55,16 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.google.protobuf</groupId>
+          <artifactId>protobuf-java</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
diff --git a/common/pom.xml b/common/pom.xml
index 314828b7..daf590b9 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -55,6 +55,12 @@
     <dependency>
       <groupId>io.prometheus</groupId>
       <artifactId>simpleclient_jetty</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-servlet</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>io.prometheus</groupId>
@@ -93,6 +99,10 @@
           <groupId>com.google.protobuf</groupId>
           <artifactId>protobuf-java</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -123,6 +133,18 @@
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-servlet</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
@@ -136,4 +158,21 @@
     </plugins>
   </build>
 
+  <profiles>
+    <profile>
+      <id>hadoop3.2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.bouncycastle</groupId>
+          <artifactId>bcprov-jdk15on</artifactId>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>junit</groupId>
+          <artifactId>junit</artifactId>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git a/coordinator/pom.xml b/coordinator/pom.xml
index df147d3f..e5abe9a3 100644
--- a/coordinator/pom.xml
+++ b/coordinator/pom.xml
@@ -75,6 +75,12 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
diff --git a/integration-test/common/pom.xml b/integration-test/common/pom.xml
index abdb0de0..8fb44c0f 100644
--- a/integration-test/common/pom.xml
+++ b/integration-test/common/pom.xml
@@ -81,6 +81,12 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-minicluster</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-json</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
@@ -164,4 +170,16 @@
             </plugins>
         </pluginManagement>
     </build>
+    <profiles>
+        <profile>
+            <id>hadoop3.2</id>
+            <dependencies>
+                <dependency>
+                    <groupId>junit</groupId>
+                    <artifactId>junit</artifactId>
+                    <scope>test</scope>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/integration-test/mr/pom.xml b/integration-test/mr/pom.xml
index 05597902..91e2cf79 100644
--- a/integration-test/mr/pom.xml
+++ b/integration-test/mr/pom.xml
@@ -96,6 +96,12 @@
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-common</artifactId>
             <version>${hadoop.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-json</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.apache.hadoop</groupId>
diff --git 
a/integration-test/mr/src/test/java/org/apache/uniffle/test/MRIntegrationTestBase.java
 
b/integration-test/mr/src/test/java/org/apache/uniffle/test/MRIntegrationTestBase.java
index ed22f953..a2cad2ce 100644
--- 
a/integration-test/mr/src/test/java/org/apache/uniffle/test/MRIntegrationTestBase.java
+++ 
b/integration-test/mr/src/test/java/org/apache/uniffle/test/MRIntegrationTestBase.java
@@ -115,6 +115,7 @@ public class MRIntegrationTestBase extends 
IntegrationTestBase {
   private void updateCommonConfiguration(Configuration jobConf) {
     long mapMb = MRJobConfig.DEFAULT_MAP_MEMORY_MB;
     jobConf.set(MRJobConfig.MAP_JAVA_OPTS, "-Xmx" + mapMb + "m");
+    jobConf.setInt(MRJobConfig.MAP_MEMORY_MB, 500);
   }
 
   private void runOriginApp(Configuration jobConf) throws Exception {
@@ -138,7 +139,7 @@ public class MRIntegrationTestBase extends 
IntegrationTestBase {
     jobConf.set(MRConfig.SHUFFLE_CONSUMER_PLUGIN, 
"org.apache.hadoop.mapreduce.task.reduce.RssShuffle");
     jobConf.set(RssMRConfig.RSS_REDUCE_REMOTE_SPILL_ENABLED, "true");
 
-    File file = new File(parentPath, "client-mr/target/shaded");
+    File file = new File(parentPath, "client-mr/core/target/shaded");
     File[] jars = file.listFiles();
     File localFile = null;
     for (File jar : jars) {
diff --git a/pom.xml b/pom.xml
index 582a44e4..670e6862 100644
--- a/pom.xml
+++ b/pom.xml
@@ -352,6 +352,10 @@
             <groupId>net.minidev</groupId>
             <artifactId>json-smart</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-json</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -649,6 +653,23 @@
         <version>1.5.0</version>
       </dependency>
 
+
+      <dependency>
+        <groupId>org.eclipse.jetty</groupId>
+        <artifactId>jetty-servlet</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.eclipse.jetty</groupId>
+        <artifactId>jetty-server</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.eclipse.jetty</groupId>
+        <artifactId>jetty-util</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+
     </dependencies>
   </dependencyManagement>
 
@@ -1602,8 +1623,9 @@
     <profile>
       <id>mr</id>
       <modules>
-        <module>client-mr</module>
+        <module>client-mr/core</module>
         <module>integration-test/mr</module>
+        <module>client-mr/hadoop${hadoop.short.version}</module>
       </modules>
       <dependencyManagement>
         <dependencies>
@@ -1620,6 +1642,11 @@
             <version>${hadoop.version}</version>
             <scope>test</scope>
           </dependency>
+          <dependency>
+            <groupId>org.apache.uniffle</groupId>
+            <artifactId>hadoop${hadoop.short.version}-shim</artifactId>
+            <version>${project.version}</version>
+          </dependency>
         </dependencies>
       </dependencyManagement>
     </profile>
@@ -1668,6 +1695,12 @@
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-common</artifactId>
             <version>${hadoop.version}</version>
+            <exclusions>
+              <exclusion>
+                <groupId>com.sun.jersey</groupId>
+                <artifactId>jersey-json</artifactId>
+              </exclusion>
+            </exclusions>
           </dependency>
           <dependency>
             <groupId>com.google.protobuf</groupId>
@@ -1694,5 +1727,39 @@
         <module>deploy/kubernetes</module>
       </modules>
     </profile>
+    <profile>
+      <id>hadoop2.8</id>
+      <properties>
+        <hadoop.version>2.8.5</hadoop.version>
+        <hadoop.short.version>2.8</hadoop.short.version>
+      </properties>
+    </profile>
+    <profile>
+      <id>hadoop3.2</id>
+      <properties>
+        <hadoop.version>3.2.1</hadoop.version>
+        <hadoop.short.version>3.2</hadoop.short.version>
+        <commons-codec.version>1.11</commons-codec.version>
+        <metrics.version>3.2.4</metrics.version>
+        <bouncycastle.version>1.60</bouncycastle.version>
+        <junit4.version>4.11</junit4.version>
+      </properties>
+      <dependencyManagement>
+        <dependencies>
+          <dependency>
+            <groupId>org.bouncycastle</groupId>
+            <artifactId>bcprov-jdk15on</artifactId>
+            <version>${bouncycastle.version}</version>
+            <scope>test</scope>
+          </dependency>
+          <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit4.version}</version>
+            <scope>test</scope>
+          </dependency>
+        </dependencies>
+      </dependencyManagement>
+    </profile>
   </profiles>
 </project>
diff --git a/server/pom.xml b/server/pom.xml
index 97907a1e..78586b71 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -86,6 +86,12 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-minicluster</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
@@ -101,6 +107,10 @@
           <groupId>io.prometheus</groupId>
           <artifactId>simpleclient_jetty</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -209,4 +219,16 @@
       </plugin>
     </plugins>
   </build>
+  <profiles>
+    <profile>
+      <id>hadoop3.2</id>
+      <dependencies>
+        <dependency>
+          <groupId>junit</groupId>
+          <artifactId>junit</artifactId>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git 
a/server/src/test/java/org/apache/uniffle/server/storage/LocalStorageManagerTest.java
 
b/server/src/test/java/org/apache/uniffle/server/storage/LocalStorageManagerTest.java
index 049ee35d..c016e13b 100644
--- 
a/server/src/test/java/org/apache/uniffle/server/storage/LocalStorageManagerTest.java
+++ 
b/server/src/test/java/org/apache/uniffle/server/storage/LocalStorageManagerTest.java
@@ -27,7 +27,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
diff --git a/storage/pom.xml b/storage/pom.xml
index 39f99b02..4caa6d86 100644
--- a/storage/pom.xml
+++ b/storage/pom.xml
@@ -45,6 +45,12 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-minicluster</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
 
     <dependency>
@@ -105,4 +111,16 @@
       </plugin>
     </plugins>
   </build>
+  <profiles>
+    <profile>
+      <id>hadoop3.2</id>
+      <dependencies>
+        <dependency>
+          <groupId>junit</groupId>
+          <artifactId>junit</artifactId>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git 
a/storage/src/main/java/org/apache/uniffle/storage/common/DefaultStorageMediaProvider.java
 
b/storage/src/main/java/org/apache/uniffle/storage/common/DefaultStorageMediaProvider.java
index 7204c575..1d34451e 100644
--- 
a/storage/src/main/java/org/apache/uniffle/storage/common/DefaultStorageMediaProvider.java
+++ 
b/storage/src/main/java/org/apache/uniffle/storage/common/DefaultStorageMediaProvider.java
@@ -28,8 +28,8 @@ import java.util.Arrays;
 import java.util.List;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git 
a/storage/src/main/java/org/apache/uniffle/storage/handler/impl/PooledHadoopShuffleWriteHandler.java
 
b/storage/src/main/java/org/apache/uniffle/storage/handler/impl/PooledHadoopShuffleWriteHandler.java
index 31113ce5..2bb55211 100644
--- 
a/storage/src/main/java/org/apache/uniffle/storage/handler/impl/PooledHadoopShuffleWriteHandler.java
+++ 
b/storage/src/main/java/org/apache/uniffle/storage/handler/impl/PooledHadoopShuffleWriteHandler.java
@@ -22,7 +22,7 @@ import java.util.concurrent.LinkedBlockingDeque;
 import java.util.function.Function;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git 
a/storage/src/main/java/org/apache/uniffle/storage/util/ShuffleStorageUtils.java
 
b/storage/src/main/java/org/apache/uniffle/storage/util/ShuffleStorageUtils.java
index d9599d71..de27a2cb 100644
--- 
a/storage/src/main/java/org/apache/uniffle/storage/util/ShuffleStorageUtils.java
+++ 
b/storage/src/main/java/org/apache/uniffle/storage/util/ShuffleStorageUtils.java
@@ -25,7 +25,7 @@ import java.util.Collections;
 import java.util.List;
 
 import com.google.common.collect.Lists;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.hash.MurmurHash;

Reply via email to