This is an automated email from the ASF dual-hosted git repository. lcwik pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/beam.git
commit ce740ebf229d54986fdab39493c309fca6ffb87f Author: Daniel Kulp <[email protected]> AuthorDate: Mon Jan 29 09:07:31 2018 -0500 Incorporate reviews and rebase on latest master --- .gitignore | 1 + build_rules.gradle | 12 +++--- pom.xml | 23 +++++------ .../SparkGroupAlsoByWindowViaWindowSet.java | 9 ++-- .../org/apache/beam/sdk/coders/CoderRegistry.java | 4 +- .../apache/beam/sdk/testing/WindowFnTestUtils.java | 8 ++-- .../operator/BeamSqlCaseExpression.java | 4 +- sdks/java/maven-archetypes/examples/pom.xml | 48 +++++++++++----------- 8 files changed, 56 insertions(+), 53 deletions(-) diff --git a/.gitignore b/.gitignore index 00b57a3..ff2faad 100644 --- a/.gitignore +++ b/.gitignore @@ -52,6 +52,7 @@ sdks/python/apache_beam/portability/api/*pb2*.* **/.fbExcludeFilterFile **/.apt_generated/**/* **/.settings/**/* +**/.gitignore # Ignore Visual Studio Code files. **/.vscode/**/* diff --git a/build_rules.gradle b/build_rules.gradle index fde5668..c4c6746 100644 --- a/build_rules.gradle +++ b/build_rules.gradle @@ -222,7 +222,6 @@ ext.applyJavaNature = { testApt auto_service } - // Add the optional and provided configurations for dependencies // TODO: Either remove these plugins and find another way to generate the Maven poms // with the correct dependency scopes configured. @@ -239,6 +238,13 @@ ext.applyJavaNature = { maxErrors = 0 } + // Apply the eclipse and apt-eclipse plugins. This adds the "eclipse" task and + // connects the apt-eclipse plugin to update the eclipse project files + // with the instructions needed to run apt within eclipse to handle the AutoValue + // and additional annotations + apply plugin: 'eclipse' + apply plugin: "net.ltgt.apt-eclipse" + // Enables a plugin which can apply code formatting to source. // TODO: Should this plugin be enabled for all projects? 
apply plugin: "com.diffplug.gradle.spotless" @@ -435,8 +441,4 @@ ext.applyAvroNature = { apply plugin: "com.commercehub.gradle.plugin.avro" } -// Apply the apt-eclipse plugin so the eclipse import can work -apply plugin: 'eclipse' -apply plugin: "net.ltgt.apt-eclipse" - diff --git a/pom.xml b/pom.xml index 9d083ab..55e852e 100644 --- a/pom.xml +++ b/pom.xml @@ -190,7 +190,6 @@ <!-- Default skipping --> <rat.skip>true</rat.skip> - </properties> <packaging>pom</packaging> @@ -1772,17 +1771,17 @@ </action> </pluginExecution> <pluginExecution> - <pluginExecutionFilter> - <groupId>net.revelc.code.formatter</groupId> - <artifactId>formatter-maven-plugin</artifactId> - <versionRange>[2.0.0,)</versionRange> - <goals> - <goal>format</goal> - </goals> - </pluginExecutionFilter> - <action> - <ignore /> - </action> + <pluginExecutionFilter> + <groupId>net.revelc.code.formatter</groupId> + <artifactId>formatter-maven-plugin</artifactId> + <versionRange>[2.0.0,)</versionRange> + <goals> + <goal>format</goal> + </goals> + </pluginExecutionFilter> + <action> + <ignore /> + </action> </pluginExecution> </pluginExecutions> </lifecycleMappingMetadata> diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java index 921d504..d03f176 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java @@ -74,6 +74,7 @@ import org.slf4j.LoggerFactory; import scala.Option; import scala.Tuple2; import scala.Tuple3; +import scala.collection.GenTraversable; import scala.collection.Iterator; import scala.collection.Seq; import scala.runtime.AbstractFunction1; @@ -272,12 +273,12 @@ public class SparkGroupAlsoByWindowViaWindowSet implements Serializable { if 
(!encodedKeyedElements.isEmpty()) { // new input for key. try { - //cast to GenTraversable to avoid a ambiguous call to head() which can come from - //mulitple super interfacesof Seq<byte[]> - byte[] b = ((scala.collection.GenTraversable<byte[]>) encodedKeyedElements).head(); + // cast to GenTraversable to avoid an ambiguous call to head() which can come from + // multiple super interfaces of Seq<byte[]> + byte[] headBytes = ((GenTraversable<byte[]>) encodedKeyedElements).head(); final KV<Long, Iterable<WindowedValue<InputT>>> keyedElements = CoderHelpers.fromByteArray( - b, KvCoder.of(VarLongCoder.of(), itrWvCoder)); + headBytes, KvCoder.of(VarLongCoder.of(), itrWvCoder)); final Long rddTimestamp = keyedElements.getKey(); diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java index 7c84b35..946c9bc 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java @@ -564,9 +564,9 @@ public class CoderRegistry { } for (int i = 0; i < typeArgumentCoders.size(); i++) { try { - Coder<?> c2 = typeArgumentCoders.get(i); + Coder<?> typeArgumentCoder = typeArgumentCoders.get(i); verifyCompatible( - c2, + typeArgumentCoder, candidateDescriptor.resolveType(typeArguments[i]).getType()); } catch (IncompatibleCoderException exn) { throw new IncompatibleCoderException( diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java index 532aa5a..9cb2a28 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java @@ -88,8 +88,8 @@ public class WindowFnTestUtils { for (W window : assignedWindowsWithValue(windowFn, element)) { 
windowSet.put(window, timestampValue(element.getTimestamp().getMillis())); } - TestMergeContext<T, W> tmc = new TestMergeContext<>(windowSet, windowFn); - windowFn.mergeWindows(tmc); + TestMergeContext<T, W> mergeContext = new TestMergeContext<>(windowSet, windowFn); + windowFn.mergeWindows(mergeContext); } Map<W, Set<String>> actual = new HashMap<>(); for (W window : windowSet.windows()) { @@ -113,8 +113,8 @@ public class WindowFnTestUtils { */ public static <T, W extends BoundedWindow> Collection<W> assignedWindowsWithValue( WindowFn<T, W> windowFn, TimestampedValue<T> timestampedValue) throws Exception { - TestAssignContext<T, W> tac = new TestAssignContext<>(timestampedValue, windowFn); - return windowFn.assignWindows(tac); + TestAssignContext<T, W> assignContext = new TestAssignContext<>(timestampedValue, windowFn); + return windowFn.assignWindows(assignContext); } private static String timestampValue(long timestamp) { diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/BeamSqlCaseExpression.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/BeamSqlCaseExpression.java index ec9ba40..c7eb156 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/BeamSqlCaseExpression.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/BeamSqlCaseExpression.java @@ -51,8 +51,8 @@ public class BeamSqlCaseExpression extends BeamSqlExpression { @Override public BeamSqlPrimitive evaluate(BeamRecord inputRow, BoundedWindow window) { for (int i = 0; i < operands.size() - 1; i += 2) { - Boolean b = opValueEvaluated(i, inputRow, window); - if (b != null && b) { + Boolean wasOpEvaluated = opValueEvaluated(i, inputRow, window); + if (wasOpEvaluated != null && wasOpEvaluated) { return BeamSqlPrimitive.of( outputType, opValueEvaluated(i + 1, inputRow, window) 
diff --git a/sdks/java/maven-archetypes/examples/pom.xml b/sdks/java/maven-archetypes/examples/pom.xml index 69afe5b..8e25e73 100644 --- a/sdks/java/maven-archetypes/examples/pom.xml +++ b/sdks/java/maven-archetypes/examples/pom.xml @@ -73,30 +73,30 @@ </plugin> <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.--> <plugin> - <groupId>org.eclipse.m2e</groupId> - <artifactId>lifecycle-mapping</artifactId> - <version>1.0.0</version> - <configuration> - <lifecycleMappingMetadata> - <pluginExecutions> - <pluginExecution> - <pluginExecutionFilter> - <groupId>org.codehaus.mojo</groupId> - <artifactId>exec-maven-plugin</artifactId> - <versionRange>[1.5.0,)</versionRange> - <goals> - <goal>exec</goal> - </goals> - </pluginExecutionFilter> - <action> - <execute> - <runOnIncremental>false</runOnIncremental> - </execute> - </action> - </pluginExecution> - </pluginExecutions> - </lifecycleMappingMetadata> - </configuration> + <groupId>org.eclipse.m2e</groupId> + <artifactId>lifecycle-mapping</artifactId> + <version>1.0.0</version> + <configuration> + <lifecycleMappingMetadata> + <pluginExecutions> + <pluginExecution> + <pluginExecutionFilter> + <groupId>org.codehaus.mojo</groupId> + <artifactId>exec-maven-plugin</artifactId> + <versionRange>[1.5.0,)</versionRange> + <goals> + <goal>exec</goal> + </goals> + </pluginExecutionFilter> + <action> + <execute> + <runOnIncremental>false</runOnIncremental> + </execute> + </action> + </pluginExecution> + </pluginExecutions> + </lifecycleMappingMetadata> + </configuration> </plugin> </plugins> </pluginManagement> -- To stop receiving notification emails like this one, please contact [email protected].
