Repository: spark
Updated Branches:
  refs/heads/master 280ff523f -> 3d4dd14cd
[SPARK-22066][BUILD] Update checkstyle to 8.2, enable it, fix violations

## What changes were proposed in this pull request?

Update plugins, including scala-maven-plugin, to latest versions. Update checkstyle to 8.2.
Remove bogus checkstyle config and enable it. Fix existing and new Java checkstyle errors.

## How was this patch tested?

Existing tests

Author: Sean Owen <[email protected]>

Closes #19282 from srowen/SPARK-22066.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3d4dd14c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3d4dd14c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3d4dd14c

Branch: refs/heads/master
Commit: 3d4dd14cd563cef2aee791dc7c15d247c076f9a1
Parents: 280ff52
Author: Sean Owen <[email protected]>
Authored: Wed Sep 20 10:01:46 2017 +0100
Committer: Sean Owen <[email protected]>
Committed: Wed Sep 20 10:01:46 2017 +0100

----------------------------------------------------------------------
 assembly/pom.xml                                |  2 +-
 .../org/apache/spark/network/util/ByteUnit.java | 12 ++++----
 .../spark/network/util/NettyMemoryMetrics.java  |  2 +-
 common/sketch/pom.xml                           | 27 +++++++++----------
 common/unsafe/pom.xml                           | 27 +++++++++----------
 .../apache/spark/unsafe/types/UTF8String.java   | 27 +++++++++++--------
 .../spark/unsafe/types/UTF8StringSuite.java     |  3 ++-
 core/pom.xml                                    |  1 +
 .../apache/spark/io/ReadAheadInputStream.java   | 21 ++++++++-------
 .../unsafe/sort/UnsafeSorterSpillReader.java    |  5 ++--
 .../spark/io/ReadAheadInputStreamSuite.java     |  3 ++-
 dev/checkstyle.xml                              | 28 +++++++++----------
 dev/test-dependencies.sh                        |  2 +-
 pom.xml                                         | 28 +++++++++++---------
 .../sources/v2/reader/DataSourceV2Reader.java   |  9 +++----
 .../v2/reader/SupportsScanUnsafeRow.java        |  5 ++--
 .../sources/v2/JavaAdvancedDataSourceV2.java    |  4 ++-
 17 files changed, 106 insertions(+), 100 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/assembly/pom.xml
----------------------------------------------------------------------
diff --git a/assembly/pom.xml b/assembly/pom.xml
index cd8366a..01fe354 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -187,7 +187,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-assembly-plugin</artifactId>
-        <version>3.0.0</version>
+        <version>3.1.0</version>
         <executions>
           <execution>
             <id>dist</id>


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
index e097714..984575a 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
@@ -17,12 +17,12 @@
 package org.apache.spark.network.util;
 
 public enum ByteUnit {
-  BYTE (1),
-  KiB (1024L),
-  MiB ((long) Math.pow(1024L, 2L)),
-  GiB ((long) Math.pow(1024L, 3L)),
-  TiB ((long) Math.pow(1024L, 4L)),
-  PiB ((long) Math.pow(1024L, 5L));
+  BYTE(1),
+  KiB(1024L),
+  MiB((long) Math.pow(1024L, 2L)),
+  GiB((long) Math.pow(1024L, 3L)),
+  TiB((long) Math.pow(1024L, 4L)),
+  PiB((long) Math.pow(1024L, 5L));
 
   ByteUnit(long multiplier) {
     this.multiplier = multiplier;


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java b/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java
index 969d9f6..77f807c 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java
@@ -44,7 +44,7 @@ public class NettyMemoryMetrics implements MetricSet {
   private final String metricPrefix;
 
   @VisibleForTesting
-  final static Set<String> VERBOSE_METRICS = new HashSet<>();
+  static final Set<String> VERBOSE_METRICS = new HashSet<>();
   static {
     VERBOSE_METRICS.addAll(Arrays.asList(
       "numAllocations",
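The two Java hunks above are representative of the checkstyle violations this patch cleans up: no whitespace between an enum constant and its argument list, and modifiers in the canonical order ("static final", not "final static"). A minimal, self-contained illustration of the compliant style (an invented example, not code from this patch):

    import java.util.HashSet;
    import java.util.Set;

    public enum SizeUnit {
      // Constant name is followed directly by its argument list, no space.
      BYTE(1L),
      KIB(1024L),
      MIB(1024L * 1024L);

      // Canonical modifier order: "static" before "final".
      static final Set<String> ALIASES = new HashSet<>();

      private final long multiplier;

      SizeUnit(long multiplier) {
        this.multiplier = multiplier;
      }

      public long toBytes(long amount) {
        return amount * multiplier;
      }
    }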
http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/common/sketch/pom.xml
----------------------------------------------------------------------
diff --git a/common/sketch/pom.xml b/common/sketch/pom.xml
index ca929d8..2d59c71 100644
--- a/common/sketch/pom.xml
+++ b/common/sketch/pom.xml
@@ -56,20 +56,17 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>net.alchim31.maven</groupId>
-          <artifactId>scala-maven-plugin</artifactId>
-          <version>3.2.2</version>
-          <configuration>
-            <javacArgs combine.children="append">
-              <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
-              <javacArg>-XDignore.symbol.file</javacArg>
-            </javacArgs>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <configuration>
+          <javacArgs combine.children="append">
+            <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
+            <javacArg>-XDignore.symbol.file</javacArg>
+          </javacArgs>
+        </configuration>
+      </plugin>
+    </plugins>
   </build>
 </project>


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/common/unsafe/pom.xml
----------------------------------------------------------------------
diff --git a/common/unsafe/pom.xml b/common/unsafe/pom.xml
index 772b0ef..a3772a2 100644
--- a/common/unsafe/pom.xml
+++ b/common/unsafe/pom.xml
@@ -93,20 +93,17 @@
  <build>
    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>net.alchim31.maven</groupId>
-          <artifactId>scala-maven-plugin</artifactId>
-          <version>3.2.2</version>
-          <configuration>
-            <javacArgs combine.children="append">
-              <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
-              <javacArg>-XDignore.symbol.file</javacArg>
-            </javacArgs>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <configuration>
+          <javacArgs combine.children="append">
+            <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
+            <javacArg>-XDignore.symbol.file</javacArg>
+          </javacArgs>
+        </configuration>
+      </plugin>
+    </plugins>
  </build>
 </project>


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
----------------------------------------------------------------------
diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
index 76db0fb..ce4a06b 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
@@ -513,9 +513,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
 
   /**
    * Based on the given trim string, trim this string starting from both ends
-   * This method searches for each character in the source string, removes the character if it is found
-   * in the trim string, stops at the first not found. It calls the trimLeft first, then trimRight.
-   * It returns a new string in which both ends trim characters have been removed.
+   * This method searches for each character in the source string, removes the character if it is
+   * found in the trim string, stops at the first not found. It calls the trimLeft first, then
+   * trimRight. It returns a new string in which both ends trim characters have been removed.
    * @param trimString the trim character string
    */
   public UTF8String trim(UTF8String trimString) {
@@ -540,8 +540,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
 
   /**
    * Based on the given trim string, trim this string starting from left end
-   * This method searches each character in the source string starting from the left end, removes the character if it
-   * is in the trim string, stops at the first character which is not in the trim string, returns the new string.
+   * This method searches each character in the source string starting from the left end, removes
+   * the character if it is in the trim string, stops at the first character which is not in the
+   * trim string, returns the new string.
    * @param trimString the trim character string
    */
   public UTF8String trimLeft(UTF8String trimString) {
@@ -552,7 +553,8 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
     int trimIdx = 0;
 
     while (srchIdx < numBytes) {
-      UTF8String searchChar = copyUTF8String(srchIdx, srchIdx + numBytesForFirstByte(this.getByte(srchIdx)) - 1);
+      UTF8String searchChar = copyUTF8String(
+        srchIdx, srchIdx + numBytesForFirstByte(this.getByte(srchIdx)) - 1);
       int searchCharBytes = searchChar.numBytes;
       // try to find the matching for the searchChar in the trimString set
       if (trimString.find(searchChar, 0) >= 0) {
@@ -587,8 +589,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
 
   /**
    * Based on the given trim string, trim this string starting from right end
-   * This method searches each character in the source string starting from the right end, removes the character if it
-   * is in the trim string, stops at the first character which is not in the trim string, returns the new string.
+   * This method searches each character in the source string starting from the right end,
+   * removes the character if it is in the trim string, stops at the first character which is not
+   * in the trim string, returns the new string.
    * @param trimString the trim character string
    */
   public UTF8String trimRight(UTF8String trimString) {
@@ -608,11 +611,13 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
       numChars ++;
     }
 
-    // index trimEnd points to the first no matching byte position from the right side of the source string.
+    // index trimEnd points to the first no matching byte position from the right side of
+    // the source string.
     int trimEnd = numBytes - 1;
     while (numChars > 0) {
-      UTF8String searchChar =
-        copyUTF8String(stringCharPos[numChars - 1], stringCharPos[numChars - 1] + stringCharLen[numChars - 1] - 1);
+      UTF8String searchChar = copyUTF8String(
+        stringCharPos[numChars - 1],
+        stringCharPos[numChars - 1] + stringCharLen[numChars - 1] - 1);
       if (trimString.find(searchChar, 0) >= 0) {
         trimEnd -= stringCharLen[numChars - 1];
       } else {


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
----------------------------------------------------------------------
diff --git a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
index f086001..7b03d2c 100644
--- a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
+++ b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
@@ -777,7 +777,8 @@ public class UTF8StringSuite {
     assertEquals(fromString("cc"), fromString("ccbaaaa").trimRight(fromString("ba")));
     assertEquals(fromString(""), fromString("aabbbbaaa").trimRight(fromString("ab")));
     assertEquals(fromString(" he"), fromString(" hello ").trimRight(fromString(" ol")));
-    assertEquals(fromString("oohell"), fromString("oohellooo../*&").trimRight(fromString("./,&%*o")));
+    assertEquals(fromString("oohell"),
+      fromString("oohellooo../*&").trimRight(fromString("./,&%*o")));
 
     assertEquals(EMPTY_UTF8, fromString(" ").trimRight(fromString(" ")));
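The reworded javadoc above describes the trim algorithm: walk the source string one character at a time, drop each character that appears in the trim string, and stop at the first one that does not. A rough sketch of the same idea against plain java.lang.String (illustrative only; Spark's UTF8String does this over raw UTF-8 bytes):

    // Sketch of the trimLeft(trimString) behavior documented above.
    static String trimLeft(String s, String trimChars) {
      int i = 0;
      while (i < s.length()) {
        int cp = s.codePointAt(i);
        if (trimChars.indexOf(cp) < 0) {
          break;  // first character not in the trim string: stop
        }
        i += Character.charCount(cp);
      }
      return s.substring(i);
    }

For example, trimLeft("aabbbbaaa", "ab") trims everything away, mirroring the suite's expectation for the equivalent trimRight case.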
http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 431967e..da68abd 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -483,6 +483,7 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
+        <version>1.6.0</version>
         <executions>
           <execution>
             <id>sparkr-pkg</id>


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
index 618bd42..5b45d26 100644
--- a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
+++ b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
@@ -32,11 +32,12 @@ import java.util.concurrent.locks.ReentrantLock;
 
 /**
  * {@link InputStream} implementation which asynchronously reads ahead from the underlying input
- * stream when specified amount of data has been read from the current buffer. It does it by maintaining
- * two buffer - active buffer and read ahead buffer. Active buffer contains data which should be returned
- * when a read() call is issued. The read ahead buffer is used to asynchronously read from the underlying
- * input stream and once the current active buffer is exhausted, we flip the two buffers so that we can
- * start reading from the read ahead buffer without being blocked in disk I/O.
+ * stream when specified amount of data has been read from the current buffer. It does it by
+ * maintaining two buffers - active buffer and read ahead buffer. Active buffer contains data
+ * which should be returned when a read() call is issued. The read ahead buffer is used to
+ * asynchronously read from the underlying input stream and once the current active buffer is
+ * exhausted, we flip the two buffers so that we can start reading from the read ahead buffer
+ * without being blocked in disk I/O.
  */
 public class ReadAheadInputStream extends InputStream {
 
@@ -83,7 +84,8 @@ public class ReadAheadInputStream extends InputStream {
 
   private final InputStream underlyingInputStream;
 
-  private final ExecutorService executorService = ThreadUtils.newDaemonSingleThreadExecutor("read-ahead");
+  private final ExecutorService executorService =
+      ThreadUtils.newDaemonSingleThreadExecutor("read-ahead");
 
   private final Condition asyncReadComplete = stateChangeLock.newCondition();
 
@@ -98,13 +100,14 @@ public class ReadAheadInputStream extends InputStream {
    * @param readAheadThresholdInBytes If the active buffer has less data than the read-ahead
    *                                  threshold, an async read is triggered.
    */
-  public ReadAheadInputStream(InputStream inputStream, int bufferSizeInBytes, int readAheadThresholdInBytes) {
+  public ReadAheadInputStream(
+      InputStream inputStream, int bufferSizeInBytes, int readAheadThresholdInBytes) {
     Preconditions.checkArgument(bufferSizeInBytes > 0,
         "bufferSizeInBytes should be greater than 0, but the value is " + bufferSizeInBytes);
     Preconditions.checkArgument(readAheadThresholdInBytes > 0 &&
             readAheadThresholdInBytes < bufferSizeInBytes,
-        "readAheadThresholdInBytes should be greater than 0 and less than bufferSizeInBytes, but the" +
-            "value is " + readAheadThresholdInBytes);
+        "readAheadThresholdInBytes should be greater than 0 and less than bufferSizeInBytes, " +
+            "but the value is " + readAheadThresholdInBytes);
     activeBuffer = ByteBuffer.allocate(bufferSizeInBytes);
     readAheadBuffer = ByteBuffer.allocate(bufferSizeInBytes);
     this.readAheadThresholdInBytes = readAheadThresholdInBytes;


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
index 1e760b0..e2f48e5 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
@@ -76,9 +76,8 @@ public final class UnsafeSorterSpillReader extends UnsafeSorterIterator implemen
         SparkEnv.get() == null ? 0.5 :
              SparkEnv.get().conf().getDouble("spark.unsafe.sorter.spill.read.ahead.fraction", 0.5);
 
-    final boolean readAheadEnabled =
-        SparkEnv.get() == null ? false :
-        SparkEnv.get().conf().getBoolean("spark.unsafe.sorter.spill.read.ahead.enabled", true);
+    final boolean readAheadEnabled = SparkEnv.get() != null &&
+        SparkEnv.get().conf().getBoolean("spark.unsafe.sorter.spill.read.ahead.enabled", true);
 
     final InputStream bs = new NioBufferedFileInputStream(file, (int) bufferSizeBytes);


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java b/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java
index 5008f93..918ddc4 100644
--- a/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java
+++ b/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java
@@ -28,6 +28,7 @@ public class ReadAheadInputStreamSuite extends GenericFileInputStreamSuite {
   @Before
   public void setUp() throws IOException {
     super.setUp();
-    inputStream = new ReadAheadInputStream(new NioBufferedFileInputStream(inputFile), 8 * 1024, 4 * 1024);
+    inputStream = new ReadAheadInputStream(
+        new NioBufferedFileInputStream(inputFile), 8 * 1024, 4 * 1024);
   }
 }
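The suite above also doubles as a usage reference: the stream simply wraps another InputStream with a buffer size and a read-ahead threshold. A hypothetical helper showing the same call (the method name and file argument are invented; imports assumed from java.io and org.apache.spark.io):

    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.spark.io.NioBufferedFileInputStream;
    import org.apache.spark.io.ReadAheadInputStream;

    // 8 KiB buffers; an async refill starts once under 4 KiB is left unread.
    static void drain(File file) throws IOException {
      try (InputStream in = new ReadAheadInputStream(
          new NioBufferedFileInputStream(file), 8 * 1024, 4 * 1024)) {
        byte[] chunk = new byte[4096];
        while (in.read(chunk) != -1) {
          // While this loop drains the active buffer, the "read-ahead"
          // daemon thread may already be filling the second buffer.
        }
      }
    }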
http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/dev/checkstyle.xml
----------------------------------------------------------------------
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index fd73ca7..68aee4d 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -52,20 +52,6 @@
         <property name="file" value="dev/checkstyle-suppressions.xml"/>
     </module>
 
-    <!--
-     If you wish to turn off checking for a section of code, you can put a comment in the source
-     before and after the section, with the following syntax:
-
-       // checkstyle:off no.XXX (such as checkstyle.off: NoFinalizer)
-       ...  // stuff that breaks the styles
-       // checkstyle:on
-    -->
-    <module name="SuppressionCommentFilter">
-        <property name="offCommentFormat" value="checkstyle.off\: ([\w\|]+)"/>
-        <property name="onCommentFormat" value="checkstyle.on\: ([\w\|]+)"/>
-        <property name="checkFormat" value="$1"/>
-    </module>
-
    <!-- Checks for whitespace                               -->
    <!-- See http://checkstyle.sf.net/config_whitespace.html -->
    <module name="FileTabCharacter">
@@ -81,6 +67,19 @@
    <module name="NewlineAtEndOfFile"/>
 
    <module name="TreeWalker">
+        <!--
+         If you wish to turn off checking for a section of code, you can put a comment in the source
+         before and after the section, with the following syntax:
+
+           // checkstyle:off no.XXX (such as checkstyle.off: NoFinalizer)
+           ...  // stuff that breaks the styles
+           // checkstyle:on
+        -->
+        <module name="SuppressionCommentFilter">
+            <property name="offCommentFormat" value="checkstyle.off\: ([\w\|]+)"/>
+            <property name="onCommentFormat" value="checkstyle.on\: ([\w\|]+)"/>
+            <property name="checkFormat" value="$1"/>
+        </module>
        <module name="OuterTypeFilename"/>
        <module name="IllegalTokenText">
            <property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
@@ -182,6 +181,5 @@
        <module name="UnusedImports"/>
        <module name="RedundantImport"/>
        <module name="RedundantModifier"/>
-        <module name="FileContentsHolder"/>
    </module>
 </module>
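Checkstyle 8.x drops FileContentsHolder and requires SuppressionCommentFilter to live under TreeWalker, which is why the module moves in the hunk above. The documented comment syntax keeps working; for instance, to exempt one section from the NoFinalizer check (an invented class, not code from this patch):

    public class LegacyHandle {
      // checkstyle.off: NoFinalizer
      @Override
      protected void finalize() throws Throwable {
        // last-resort cleanup of a legacy native resource
        super.finalize();
      }
      // checkstyle.on: NoFinalizer
    }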
http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/dev/test-dependencies.sh
----------------------------------------------------------------------
diff --git a/dev/test-dependencies.sh b/dev/test-dependencies.sh
index 114b116..c771457 100755
--- a/dev/test-dependencies.sh
+++ b/dev/test-dependencies.sh
@@ -46,7 +46,7 @@ OLD_VERSION=$($MVN -q \
     -Dexec.executable="echo" \
     -Dexec.args='${project.version}' \
     --non-recursive \
-    org.codehaus.mojo:exec-maven-plugin:1.5.0:exec)
+    org.codehaus.mojo:exec-maven-plugin:1.6.0:exec)
 if [ $? != 0 ]; then
     echo -e "Error while getting version string from Maven:\n$OLD_VERSION"
     exit 1


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 0bbbf20..e292db9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1969,7 +1969,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-enforcer-plugin</artifactId>
-        <version>1.4.1</version>
+        <version>3.0.0-M1</version>
         <executions>
           <execution>
             <id>enforce-versions</id>
@@ -2012,7 +2012,7 @@
       <plugin>
         <groupId>net.alchim31.maven</groupId>
         <artifactId>scala-maven-plugin</artifactId>
-        <version>3.2.2</version>
+        <version>3.3.1</version>
         <executions>
           <execution>
             <id>eclipse-add-source</id>
@@ -2061,7 +2061,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.6.1</version>
+        <version>3.7.0</version>
         <configuration>
           <source>${java.version}</source>
           <target>${java.version}</target>
@@ -2078,7 +2078,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.19.1</version>
+        <version>2.20.1</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
           <includes>
@@ -2222,7 +2222,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-javadoc-plugin</artifactId>
-        <version>2.10.4</version>
+        <version>3.0.0-M1</version>
         <configuration>
           <additionalparam>-Xdoclint:all -Xdoclint:-missing</additionalparam>
           <tags>
@@ -2262,17 +2262,17 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
-        <version>1.5.0</version>
+        <version>1.6.0</version>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-assembly-plugin</artifactId>
-        <version>3.0.0</version>
+        <version>3.1.0</version>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
-        <version>3.0.0</version>
+        <version>3.1.0</version>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
@@ -2287,7 +2287,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-dependency-plugin</artifactId>
-        <version>3.0.0</version>
+        <version>3.0.2</version>
         <executions>
           <execution>
             <id>default-cli</id>
@@ -2487,10 +2487,7 @@
         <artifactId>maven-checkstyle-plugin</artifactId>
         <version>2.17</version>
         <configuration>
-          <verbose>false</verbose>
-          <failOnViolation>false</failOnViolation>
           <includeTestSourceDirectory>true</includeTestSourceDirectory>
-          <failOnWarning>false</failOnWarning>
           <sourceDirectories>${basedir}/src/main/java,${basedir}/src/main/scala</sourceDirectories>
           <testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
           <configLocation>dev/checkstyle.xml</configLocation>
@@ -2498,6 +2495,13 @@
           <inputEncoding>${project.build.sourceEncoding}</inputEncoding>
           <outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
         </configuration>
+        <dependencies>
+          <dependency>
+            <groupId>com.puppycrawl.tools</groupId>
+            <artifactId>checkstyle</artifactId>
+            <version>8.2</version>
+          </dependency>
+        </dependencies>
         <executions>
           <execution>
             <goals>
http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java
----------------------------------------------------------------------
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java
index 48feb04..fb4d5c0 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java
@@ -21,15 +21,14 @@ import java.util.List;
 
 import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.sources.v2.DataSourceV2Options;
-import org.apache.spark.sql.sources.v2.ReadSupport;
-import org.apache.spark.sql.sources.v2.ReadSupportWithSchema;
 import org.apache.spark.sql.types.StructType;
 
 /**
  * A data source reader that is returned by
- * {@link ReadSupport#createReader(DataSourceV2Options)} or
- * {@link ReadSupportWithSchema#createReader(StructType, DataSourceV2Options)}.
+ * {@link org.apache.spark.sql.sources.v2.ReadSupport#createReader(
+ * org.apache.spark.sql.sources.v2.DataSourceV2Options)} or
+ * {@link org.apache.spark.sql.sources.v2.ReadSupportWithSchema#createReader(
+ * StructType, org.apache.spark.sql.sources.v2.DataSourceV2Options)}.
  * It can mix in various query optimization interfaces to speed up the data scan. The actual scan
  * logic should be delegated to {@link ReadTask}s that are returned by {@link #createReadTasks()}.
 *


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java
----------------------------------------------------------------------
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java
index 829f9a0..d5eada8 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java
@@ -23,8 +23,6 @@ import org.apache.spark.annotation.Experimental;
 import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
-import org.apache.spark.sql.sources.v2.reader.DataSourceV2Reader;
-import org.apache.spark.sql.sources.v2.reader.ReadTask;
 
 /**
  * A mix-in interface for {@link DataSourceV2Reader}. Data source readers can implement this
@@ -39,7 +37,8 @@ public interface SupportsScanUnsafeRow extends DataSourceV2Reader {
 
   @Override
   default List<ReadTask<Row>> createReadTasks() {
-    throw new IllegalStateException("createReadTasks should not be called with SupportsScanUnsafeRow.");
+    throw new IllegalStateException(
+        "createReadTasks should not be called with SupportsScanUnsafeRow.");
   }
 
   /**


http://git-wip-us.apache.org/repos/asf/spark/blob/3d4dd14c/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java
----------------------------------------------------------------------
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java b/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java
index 50900e9..7aacf03 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java
@@ -32,7 +32,9 @@ import org.apache.spark.sql.types.StructType;
 
 public class JavaAdvancedDataSourceV2 implements DataSourceV2, ReadSupport {
 
-  class Reader implements DataSourceV2Reader, SupportsPushDownRequiredColumns, SupportsPushDownFilters {
+  class Reader implements DataSourceV2Reader, SupportsPushDownRequiredColumns,
+      SupportsPushDownFilters {
+
     private StructType requiredSchema = new StructType().add("i", "int").add("j", "int");
     private Filter[] filters = new Filter[0];
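The SupportsScanUnsafeRow hunk above also shows the mix-in pattern these v2 reader interfaces rely on: the specialized interface overrides the base scan method with a default that must never be invoked, and contributes its own variant instead. A stripped-down sketch of that shape (names invented here, not Spark's API):

    import java.util.List;

    interface BaseReader {
      List<String> createReadTasks();
    }

    interface SupportsRawScan extends BaseReader {
      // Engines that detect this mix-in call createRawReadTasks() instead,
      // so the inherited entry point is stubbed out defensively.
      @Override
      default List<String> createReadTasks() {
        throw new IllegalStateException(
            "createReadTasks should not be called with SupportsRawScan.");
      }

      List<byte[]> createRawReadTasks();
    }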
