This is an automated email from the ASF dual-hosted git repository.
aokolnychyi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git
The following commit(s) were added to refs/heads/master by this push:
new 118efb6 Spark: Use JavaSparkContext.fromSparkContext instead of constructor (#2812)
118efb6 is described below
commit 118efb6cac1d6661bfffaad89a2c8f91a32ecfd6
Author: Anton Okolnychyi <[email protected]>
AuthorDate: Mon Jul 12 16:28:51 2021 -1000
Spark: Use JavaSparkContext.fromSparkContext instead of constructor (#2812)
---
.baseline/checkstyle/checkstyle.xml | 5 +++++
.../main/java/org/apache/iceberg/actions/RewriteDataFilesAction.java | 2 +-
.../main/java/org/apache/iceberg/spark/actions/BaseSparkAction.java | 4 ++--
.../java/org/apache/iceberg/spark/source/TestDataFrameWrites.java | 2 +-
.../java/org/apache/iceberg/spark/source/TestPartitionPruning.java | 2 +-
.../test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java | 2 +-
.../java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java | 2 +-
7 files changed, 12 insertions(+), 7 deletions(-)
diff --git a/.baseline/checkstyle/checkstyle.xml
b/.baseline/checkstyle/checkstyle.xml
index c5ce135..9df746f 100644
--- a/.baseline/checkstyle/checkstyle.xml
+++ b/.baseline/checkstyle/checkstyle.xml
@@ -46,6 +46,11 @@
<property name="format"
value="sparkContext\(\)\.hadoopConfiguration\(\)"/>
<property name="message" value="Are you sure that you want to use
sparkContext().hadoopConfiguration()? In most cases, you should use
sessionState().newHadoopConf() instead, so that the Hadoop configurations
specified in the Spark session configuration will come into effect."/>
</module>
+ <module name="RegexpSingleline">
+ <property name="fileExtensions" value="java"/>
+ <property name="format" value="new JavaSparkContext\(.*\)"/>
+ <property name="message" value="Prefer using JavaSparkContext.fromSparkContext() instead of calling a constructor directly."/>
+ </module>
<module name="SuppressionFilter"> <!-- baseline-gradle: README.md -->
<property name="file"
value="${config_loc}/checkstyle-suppressions.xml"/>
</module>
diff --git
a/spark/src/main/java/org/apache/iceberg/actions/RewriteDataFilesAction.java
b/spark/src/main/java/org/apache/iceberg/actions/RewriteDataFilesAction.java
index 735e719..a45aaea 100644
--- a/spark/src/main/java/org/apache/iceberg/actions/RewriteDataFilesAction.java
+++ b/spark/src/main/java/org/apache/iceberg/actions/RewriteDataFilesAction.java
@@ -40,7 +40,7 @@ public class RewriteDataFilesAction
RewriteDataFilesAction(SparkSession spark, Table table) {
super(table);
- this.sparkContext = new JavaSparkContext(spark.sparkContext());
+ this.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext());
}
@Override
diff --git
a/spark/src/main/java/org/apache/iceberg/spark/actions/BaseSparkAction.java
b/spark/src/main/java/org/apache/iceberg/spark/actions/BaseSparkAction.java
index fe7d980..785d9a3 100644
--- a/spark/src/main/java/org/apache/iceberg/spark/actions/BaseSparkAction.java
+++ b/spark/src/main/java/org/apache/iceberg/spark/actions/BaseSparkAction.java
@@ -64,7 +64,7 @@ abstract class BaseSparkAction<ThisT, R> implements
Action<ThisT, R> {
protected BaseSparkAction(SparkSession spark) {
this.spark = spark;
- this.sparkContext = new JavaSparkContext(spark.sparkContext());
+ this.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext());
}
protected SparkSession spark() {
@@ -115,7 +115,7 @@ abstract class BaseSparkAction<ThisT, R> implements
Action<ThisT, R> {
}
protected Dataset<Row> buildValidDataFileDF(Table table) {
- JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
+ JavaSparkContext context = JavaSparkContext.fromSparkContext(spark.sparkContext());
Broadcast<FileIO> ioBroadcast =
context.broadcast(SparkUtil.serializableFileIO(table));
Dataset<ManifestFileBean> allManifests = loadMetadataTable(table,
ALL_MANIFESTS)
diff --git
a/spark/src/test/java/org/apache/iceberg/spark/source/TestDataFrameWrites.java
b/spark/src/test/java/org/apache/iceberg/spark/source/TestDataFrameWrites.java
index 015b819..f8f66ca 100644
---
a/spark/src/test/java/org/apache/iceberg/spark/source/TestDataFrameWrites.java
+++
b/spark/src/test/java/org/apache/iceberg/spark/source/TestDataFrameWrites.java
@@ -121,7 +121,7 @@ public abstract class TestDataFrameWrites extends
AvroDataTest {
@BeforeClass
public static void startSpark() {
TestDataFrameWrites.spark =
SparkSession.builder().master("local[2]").getOrCreate();
- TestDataFrameWrites.sc = new JavaSparkContext(spark.sparkContext());
+ TestDataFrameWrites.sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
}
@AfterClass
diff --git
a/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java
b/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java
index 79e6603..3cba747 100644
---
a/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java
+++
b/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java
@@ -105,7 +105,7 @@ public abstract class TestPartitionPruning {
@BeforeClass
public static void startSpark() {
TestPartitionPruning.spark =
SparkSession.builder().master("local[2]").getOrCreate();
- TestPartitionPruning.sparkContext = new JavaSparkContext(spark.sparkContext());
+ TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext());
String optionKey = String.format("fs.%s.impl",
CountOpenLocalFileSystem.scheme);
CONF.set(optionKey, CountOpenLocalFileSystem.class.getName());
diff --git
a/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java
b/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java
index 227e307..637ad7c 100644
--- a/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java
+++ b/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java
@@ -105,7 +105,7 @@ public abstract class TestSparkDataFile {
@BeforeClass
public static void startSpark() {
TestSparkDataFile.spark =
SparkSession.builder().master("local[2]").getOrCreate();
- TestSparkDataFile.sparkContext = new JavaSparkContext(spark.sparkContext());
+ TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext());
}
@AfterClass
diff --git
a/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java
b/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java
index 3f6f8ef..9716987 100644
---
a/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java
+++
b/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java
@@ -82,7 +82,7 @@ public abstract class TestWriteMetricsConfig {
@BeforeClass
public static void startSpark() {
TestWriteMetricsConfig.spark =
SparkSession.builder().master("local[2]").getOrCreate();
- TestWriteMetricsConfig.sc = new JavaSparkContext(spark.sparkContext());
+ TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
}
@AfterClass