This is an automated email from the ASF dual-hosted git repository.
enricomi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-uniffle.git
The following commit(s) were added to refs/heads/master by this push:
new ea1070c6b [#1805][part-1] build: Support Scala 2.13 (#1806)
ea1070c6b is described below
commit ea1070c6b9eb543b0e91864ddae7da68fcce7aa0
Author: Enrico Minack <[email protected]>
AuthorDate: Wed Jun 19 15:31:04 2024 +0200
[#1805][part-1] build: Support Scala 2.13 (#1806)
### What changes were proposed in this pull request?
Support compiling Spark client against Scala 2.13 dependencies.
### Why are the changes needed?
Starting with Spark 4, Scala 2.13 is required.
Contributes to: #1805
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
CI
---
.github/workflows/parallel.yml | 1 +
.../spark/shuffle/writer/RssShuffleWriterTest.java | 53 +++++++++++-----------
.../org/apache/uniffle/test/GetReaderTest.java | 2 +-
pom.xml | 7 +++
4 files changed, 35 insertions(+), 28 deletions(-)
diff --git a/.github/workflows/parallel.yml b/.github/workflows/parallel.yml
index 5468e57c1..5ea5cd67b 100644
--- a/.github/workflows/parallel.yml
+++ b/.github/workflows/parallel.yml
@@ -60,6 +60,7 @@ jobs:
- spark3.3
- spark3.4
- spark3.5
+ - spark3.5-scala2.13
- mr-hadoop2.8
- mr-hadoop3.2
- tez
diff --git a/client-spark/spark3/src/test/java/org/apache/spark/shuffle/writer/RssShuffleWriterTest.java b/client-spark/spark3/src/test/java/org/apache/spark/shuffle/writer/RssShuffleWriterTest.java
index 9e442e141..53a8e7143 100644
--- a/client-spark/spark3/src/test/java/org/apache/spark/shuffle/writer/RssShuffleWriterTest.java
+++ b/client-spark/spark3/src/test/java/org/apache/spark/shuffle/writer/RssShuffleWriterTest.java
@@ -32,7 +32,7 @@ import java.util.stream.Collectors;
import scala.Product2;
import scala.Tuple2;
-import scala.collection.mutable.MutableList;
+import scala.collection.immutable.Nil;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -77,15 +77,13 @@ import static org.mockito.Mockito.when;
public class RssShuffleWriterTest {
- private MutableList<Product2<String, String>> createMockRecords() {
- MutableList<Product2<String, String>> data = new MutableList<>();
- data.appendElem(new Tuple2<>("testKey2", "testValue2"));
- data.appendElem(new Tuple2<>("testKey3", "testValue3"));
- data.appendElem(new Tuple2<>("testKey4", "testValue4"));
- data.appendElem(new Tuple2<>("testKey6", "testValue6"));
- data.appendElem(new Tuple2<>("testKey1", "testValue1"));
- data.appendElem(new Tuple2<>("testKey5", "testValue5"));
- return data;
+ private scala.collection.immutable.List<Product2<String, String>> createMockRecords() {
+ return Nil.$colon$colon(new Tuple2<>("testKey2", "testValue2"))
+ .$colon$colon(new Tuple2<>("testKey3", "testValue3"))
+ .$colon$colon(new Tuple2<>("testKey4", "testValue4"))
+ .$colon$colon(new Tuple2<>("testKey6", "testValue6"))
+ .$colon$colon(new Tuple2<>("testKey1", "testValue1"))
+ .$colon$colon(new Tuple2<>("testKey5", "testValue5"));
}
private MutableShuffleHandleInfo createMutableShuffleHandle() {
@@ -275,7 +273,7 @@ public class RssShuffleWriterTest {
// case1: the reassignment will refresh the following plan. So the failure will only occur one
// time.
- MutableList<Product2<String, String>> mockedData = createMockRecords();
+ scala.collection.immutable.List<Product2<String, String>> mockedData =
createMockRecords();
writer.write(mockedData.iterator());
Awaitility.await()
@@ -330,7 +328,7 @@ public class RssShuffleWriterTest {
// case1: the reassignment will refresh the following plan. So the failure will only occur one
// time.
- MutableList<Product2<String, String>> mockedData = createMockRecords();
+ scala.collection.immutable.List<Product2<String, String>> mockedData = createMockRecords();
writer.write(mockedData.iterator());
Awaitility.await()
@@ -469,7 +467,7 @@ public class RssShuffleWriterTest {
doNothing().when(rssShuffleWriterSpy).sendCommit();
// case 1. failed blocks will be resent
- MutableList<Product2<String, String>> data = createMockRecords();
+ scala.collection.immutable.List<Product2<String, String>> data = createMockRecords();
rssShuffleWriterSpy.write(data.iterator());
Awaitility.await()
@@ -520,7 +518,7 @@ public class RssShuffleWriterTest {
});
manager.setDataPusher(alwaysFailedDataPusher);
- MutableList<Product2<String, String>> mockedData = createMockRecords();
+ scala.collection.immutable.List<Product2<String, String>> mockedData = createMockRecords();
try {
rssShuffleWriter.write(mockedData.iterator());
@@ -741,12 +739,13 @@ public class RssShuffleWriterTest {
contextMock);
rssShuffleWriter.getBufferManager().setSpillFunc(rssShuffleWriter::processShuffleBlockInfos);
- MutableList<Product2<String, String>> data = new MutableList<>();
- // One record is 26 bytes
- data.appendElem(new Tuple2<>("Key", "Value11111111111111"));
- data.appendElem(new Tuple2<>("Key", "Value11111111111111"));
- data.appendElem(new Tuple2<>("Key", "Value11111111111111"));
- data.appendElem(new Tuple2<>("Key", "Value11111111111111"));
+ scala.collection.immutable.List<Product2<String, String>> data =
+ Nil
+ // One record is 26 bytes
+ .$colon$colon(new Tuple2<>("Key", "Value11111111111111"))
+ .$colon$colon(new Tuple2<>("Key", "Value11111111111111"))
+ .$colon$colon(new Tuple2<>("Key", "Value11111111111111"))
+ .$colon$colon(new Tuple2<>("Key", "Value11111111111111"));
// case1: all blocks are sent and pass the blocks check when spill is triggered
rssShuffleWriter.write(data.iterator());
@@ -867,13 +866,13 @@ public class RssShuffleWriterTest {
doNothing().when(rssShuffleWriterSpy).sendCommit();
// case 1
- MutableList<Product2<String, String>> data = new MutableList<>();
- data.appendElem(new Tuple2<>("testKey2", "testValue2"));
- data.appendElem(new Tuple2<>("testKey3", "testValue3"));
- data.appendElem(new Tuple2<>("testKey4", "testValue4"));
- data.appendElem(new Tuple2<>("testKey6", "testValue6"));
- data.appendElem(new Tuple2<>("testKey1", "testValue1"));
- data.appendElem(new Tuple2<>("testKey5", "testValue5"));
+ scala.collection.immutable.List<Product2<String, String>> data =
+ Nil.$colon$colon(new Tuple2<>("testKey2", "testValue2"))
+ .$colon$colon(new Tuple2<>("testKey3", "testValue3"))
+ .$colon$colon(new Tuple2<>("testKey4", "testValue4"))
+ .$colon$colon(new Tuple2<>("testKey6", "testValue6"))
+ .$colon$colon(new Tuple2<>("testKey1", "testValue1"))
+ .$colon$colon(new Tuple2<>("testKey5", "testValue5"));
rssShuffleWriterSpy.write(data.iterator());
assertTrue(shuffleWriteMetrics.writeTime() > 0);
diff --git a/integration-test/spark3/src/test/java/org/apache/uniffle/test/GetReaderTest.java b/integration-test/spark3/src/test/java/org/apache/uniffle/test/GetReaderTest.java
index e1b5dc037..986416e84 100644
--- a/integration-test/spark3/src/test/java/org/apache/uniffle/test/GetReaderTest.java
+++ b/integration-test/spark3/src/test/java/org/apache/uniffle/test/GetReaderTest.java
@@ -25,8 +25,8 @@ import java.util.concurrent.TimeUnit;
import scala.Option;
import scala.Tuple2;
-import scala.collection.Seq;
import scala.collection.immutable.Map;
+import scala.collection.immutable.Seq;
import com.google.common.util.concurrent.Uninterruptibles;
import org.apache.hadoop.conf.Configuration;
diff --git a/pom.xml b/pom.xml
index 8ea38c417..9dacdcc1a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2059,6 +2059,13 @@
</dependencyManagement>
</profile>
+ <profile>
+ <id>scala2.13</id>
+ <properties>
+ <scala.binary.version>2.13</scala.binary.version>
+ </properties>
+ </profile>
+
<profile>
<id>tez</id>
<modules>