Repository: beam Updated Branches: refs/heads/master 7bc2938a2 -> 65135fd7a
[BEAM-1778] Clean up pass of dataflow/google references/URLs in Java SDK Project: http://git-wip-us.apache.org/repos/asf/beam/repo Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/e1b339ca Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/e1b339ca Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/e1b339ca Branch: refs/heads/master Commit: e1b339cae90ebf78f20d44aa875c8770c41de944 Parents: 7bc2938 Author: melissa <[email protected]> Authored: Tue Mar 21 17:06:38 2017 -0700 Committer: Ahmet Altay <[email protected]> Committed: Wed Mar 22 17:28:41 2017 -0700 ---------------------------------------------------------------------- .../src/main/java/org/apache/beam/sdk/io/AvroIO.java | 14 ++++++-------- .../main/java/org/apache/beam/sdk/io/AvroSource.java | 2 +- .../src/main/java/org/apache/beam/sdk/io/Sink.java | 2 +- .../src/main/java/org/apache/beam/sdk/io/TextIO.java | 4 ++-- .../org/apache/beam/sdk/options/BigQueryOptions.java | 4 ++-- .../beam/sdk/options/CloudResourceManagerOptions.java | 4 ++-- .../java/org/apache/beam/sdk/options/GcsOptions.java | 2 +- .../beam/sdk/options/GoogleApiDebugOptions.java | 10 +++++----- .../org/apache/beam/sdk/options/PubsubOptions.java | 10 +++++----- .../apache/beam/sdk/testing/SerializableMatcher.java | 2 +- .../apache/beam/sdk/testing/SerializableMatchers.java | 2 +- .../java/org/apache/beam/sdk/transforms/Combine.java | 2 +- .../java/org/apache/beam/sdk/transforms/Create.java | 4 +--- .../org/apache/beam/sdk/transforms/PTransform.java | 2 +- .../java/org/apache/beam/sdk/transforms/ParDo.java | 5 +++-- .../java/org/apache/beam/sdk/util/TimeDomain.java | 2 +- sdks/java/core/src/main/proto/README.md | 3 --- .../apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java | 2 +- .../apache/beam/sdk/io/gcp/bigtable/BigtableIO.java | 6 +++--- .../apache/beam/sdk/io/gcp/datastore/DatastoreV1.java | 2 +- .../beam/sdk/transforms/MapElementsJava8Test.java | 4 ++-- sdks/java/javadoc/overview.html | 2 +- 22 files changed, 42 
insertions(+), 48 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java index 67a4381..96f0a50 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java @@ -76,8 +76,7 @@ import org.apache.beam.sdk.values.PDone; * p.apply(AvroIO.Read.from("/path/to/file.avro") * .withSchema(AvroAutoGenClass.class)); * - * // A Read from a GCS file (runs locally and via the Google Cloud - * // Dataflow service): + * // A Read from a GCS file (runs locally and using remote execution): * Schema schema = new Schema.Parser().parse(new File("schema.avsc")); * PCollection<GenericRecord> records = * p.apply(AvroIO.Read @@ -106,8 +105,7 @@ import org.apache.beam.sdk.values.PDone; * records.apply(AvroIO.Write.to("/path/to/file.avro") * .withSchema(AvroAutoGenClass.class)); * - * // A Write to a sharded GCS file (runs locally and via the Google Cloud - * // Dataflow service): + * // A Write to a sharded GCS file (runs locally and using remote execution): * Schema schema = new Schema.Parser().parse(new File("schema.avsc")); * PCollection<GenericRecord> records = ...; * records.apply("WriteToAvro", AvroIO.Write @@ -122,7 +120,7 @@ import org.apache.beam.sdk.values.PDone; * * <h3>Permissions</h3> * Permission requirements depend on the {@link PipelineRunner} that is used to execute the - * Dataflow job. Please refer to the documentation of corresponding {@link PipelineRunner}s for + * pipeline. Please refer to the documentation of corresponding {@link PipelineRunner}s for * more details. 
*/ public class AvroIO { @@ -138,8 +136,8 @@ public class AvroIO { * with the given name or pattern. This can be a local filename * or filename pattern (if running locally), or a Google Cloud * Storage filename or filename pattern of the form - * {@code "gs://<bucket>/<filepath>"} (if running locally or via - * the Google Cloud Dataflow service). Standard + * {@code "gs://<bucket>/<filepath>"} (if running locally or + * using remote execution). Standard * <a href="http://docs.oracle.com/javase/tutorial/essential/io/find.html">Java * Filesystem glob patterns</a> ("*", "?", "[..]") are supported. */ @@ -360,7 +358,7 @@ public class AvroIO { * with the given prefix. This can be a local filename * (if running locally), or a Google Cloud Storage filename of * the form {@code "gs://<bucket>/<filepath>"} - * (if running locally or via the Google Cloud Dataflow service). + * (if running locally or using remote execution). * * <p>The files written will begin with this prefix, followed by * a shard identifier (see {@link Bound#withNumShards}, and end http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java index 2ba793a..fe3ac5c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java @@ -122,7 +122,7 @@ import org.apache.commons.compress.utils.CountingInputStream; * <h3>Permissions</h3> * * <p>Permission requirements depend on the {@link PipelineRunner} that is used to execute the - * Dataflow job. Please refer to the documentation of corresponding {@link PipelineRunner}s for + * pipeline. Please refer to the documentation of corresponding {@link PipelineRunner}s for * more details. 
* * @param <T> The type of records to be read from the source. http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java index d1770c8..6742784 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java @@ -36,7 +36,7 @@ import org.apache.beam.sdk.values.PCollection; * etc.) * </ol> * - * <p>The {@link Write} transform can be used in a Dataflow pipeline to perform this write. + * <p>The {@link Write} transform can be used in a pipeline to perform this write. * Specifically, a Write transform can be applied to a {@link PCollection} {@code p} by: * * <p>{@code p.apply(Write.to(new MySink()));} http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java index f8943a5..58b55a9 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java @@ -116,7 +116,7 @@ public class TextIO { * Returns a transform for reading text files that reads from the file(s) * with the given filename or filename pattern. This can be a local path (if running locally), * or a Google Cloud Storage filename or filename pattern of the form - * {@code "gs://<bucket>/<filepath>"} (if running locally or via the Google Cloud Dataflow - * service). + * {@code "gs://<bucket>/<filepath>"} (if running locally or using remote execution). + * 
Standard <a href="http://docs.oracle.com/javase/tutorial/essential/io/find.html" * >Java Filesystem glob patterns</a> ("*", "?", "[..]") are supported. */ @@ -342,7 +342,7 @@ public class TextIO { * with the given prefix. This can be a local filename * (if running locally), or a Google Cloud Storage filename of * the form {@code "gs://<bucket>/<filepath>"} - * (if running locally or via the Google Cloud Dataflow service). + * (if running locally or using remote execution). * * <p>The files written will begin with this prefix, followed by * a shard identifier (see {@link Bound#withNumShards(int)}, and end http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/options/BigQueryOptions.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/BigQueryOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/BigQueryOptions.java index db7b69c..7672cd7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/BigQueryOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/BigQueryOptions.java @@ -18,9 +18,9 @@ package org.apache.beam.sdk.options; /** - * Properties needed when using BigQuery with the Dataflow SDK. + * Properties needed when using Google BigQuery with the Apache Beam SDK. */ -@Description("Options that are used to configure BigQuery. See " +@Description("Options that are used to configure Google BigQuery. 
See " + "https://cloud.google.com/bigquery/what-is-bigquery for details on BigQuery.") public interface BigQueryOptions extends ApplicationNameOptions, GcpOptions, PipelineOptions, StreamingOptions { http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/options/CloudResourceManagerOptions.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/CloudResourceManagerOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/CloudResourceManagerOptions.java index ed532db..13fdaf3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/CloudResourceManagerOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/CloudResourceManagerOptions.java @@ -21,9 +21,9 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.beam.sdk.util.GcpProjectUtil; /** - * Properties needed when using CloudResourceManager with the Beam SDK. + * Properties needed when using Google CloudResourceManager with the Apache Beam SDK. */ -@Description("Options that are used to configure CloudResourceManager. See " +@Description("Options that are used to configure Google CloudResourceManager. 
See " + "https://cloud.google.com/resource-manager/ for details on CloudResourceManager.") public interface CloudResourceManagerOptions extends ApplicationNameOptions, GcpOptions, PipelineOptions, StreamingOptions { http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java index 72e106d..2187e7d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java @@ -112,7 +112,7 @@ public interface GcsOptions extends void setPathValidator(PathValidator validator); /** - * Returns the default {@link ExecutorService} to use within the Dataflow SDK. The + * Returns the default {@link ExecutorService} to use within the Apache Beam SDK. The * {@link ExecutorService} is compatible with AppEngine. */ class ExecutorServiceFactory implements DefaultValueFactory<ExecutorService> { http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java index 09ab0f5..f9cb575 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java @@ -25,12 +25,12 @@ import java.util.HashMap; import java.util.Map; /** - * These options configure debug settings for Google API clients created within the Dataflow SDK. 
+ * These options configure debug settings for Google API clients created within the Apache Beam SDK. */ public interface GoogleApiDebugOptions extends PipelineOptions { /** - * This option enables tracing of API calls to Google services used within the - * Dataflow SDK. Values are expected in JSON format <code>{"ApiName":"TraceDestination",...} + * This option enables tracing of API calls to Google services used within the Apache + * Beam SDK. Values are expected in JSON format <code>{"ApiName":"TraceDestination",...} * </code> where the {@code ApiName} represents the request classes canonical name. The * {@code TraceDestination} is a logical trace consumer to whom the trace will be reported. * Typically, "producer" is the right destination to use: this makes API traces available to the @@ -39,8 +39,8 @@ public interface GoogleApiDebugOptions extends PipelineOptions { * <code>{"Dataflow":"producer"}</code>, all calls to the Dataflow service will be made available * to Google, specifically to the Google Cloud Dataflow team. */ - @Description("This option enables tracing of API calls to Google services used within the " - + "Dataflow SDK. Values are expected in JSON format {\"ApiName\":\"TraceDestination\",...} " + @Description("This option enables tracing of API calls to Google services used within the Apache " + + "Beam SDK. Values are expected in JSON format {\"ApiName\":\"TraceDestination\",...} " + "where the ApiName represents the request classes canonical name. The TraceDestination is " + "a logical trace consumer to whom the trace will be reported. 
Typically, \"producer\" is " + "the right destination to use: this makes API traces available to the team offering the " http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PubsubOptions.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PubsubOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PubsubOptions.java index f84e47b..b065d19 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PubsubOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PubsubOptions.java @@ -18,17 +18,17 @@ package org.apache.beam.sdk.options; /** - * Properties that can be set when using Pubsub with the Beam SDK. + * Properties that can be set when using Google Cloud Pub/Sub with the Apache Beam SDK. */ -@Description("Options that are used to configure BigQuery. See " - + "https://cloud.google.com/bigquery/what-is-bigquery for details on BigQuery.") +@Description("Options that are used to configure Google Cloud Pub/Sub. See " + + "https://cloud.google.com/pubsub/docs/overview for details on Cloud Pub/Sub.") public interface PubsubOptions extends ApplicationNameOptions, GcpOptions, PipelineOptions, StreamingOptions { /** - * Root URL for use with the Pubsub API. + * Root URL for use with the Google Cloud Pub/Sub API. 
*/ - @Description("Root URL for use with the Pubsub API") + @Description("Root URL for use with the Google Cloud Pub/Sub API") @Default.String("https://pubsub.googleapis.com") @Hidden String getPubsubRootUrl(); http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java index 4e4299d..bce8142 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java @@ -25,7 +25,7 @@ import org.hamcrest.Matcher; /** * A {@link Matcher} that is also {@link Serializable}. * - * <p>Such matchers can be used with {@link PAssert}, which builds Dataflow pipelines + * <p>Such matchers can be used with {@link PAssert}, which builds pipelines * such that these matchers may be serialized and executed remotely. 
* * <p>To create a {@code SerializableMatcher}, extend {@link org.hamcrest.BaseMatcher} http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java index af8f49d..7f72805 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java @@ -1044,7 +1044,7 @@ class SerializableMatchers implements Serializable { * with a {@link SerializableSupplier#get()} method that returns a fresh instance of the * {@link Matcher} desired. The resulting {@link SerializableMatcher} will behave according to * the {@link Matcher} returned by {@link SerializableSupplier#get() get()} when it is invoked - * during matching (which may occur on another machine, such as a Dataflow worker). + * during matching (which may occur on another machine). * * <pre>{@code * return fromSupplier(new SerializableSupplier<Matcher<T>>() { http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java index b4626e7..3215ffa 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java @@ -76,7 +76,7 @@ import org.apache.beam.sdk.values.TypeDescriptor; * {@code PTransform}s for combining {@code PCollection} elements * globally and per-key. 
* - * <p>See the <a href="https://cloud.google.com/dataflow/model/combine">documentation</a> + * <p>See the <a href="https://beam.apache.org/documentation/programming-guide/#transforms-combine">documentation</a> * for how to use the operations in this class. */ public class Combine { http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java index fa3b412..c074502 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java @@ -83,9 +83,7 @@ import org.joda.time.Instant; * needs to be created without dependencies on files or other external * entities. This is especially useful during testing. * - * <p>Caveat: {@code Create} only supports small in-memory datasets, - * particularly when submitting jobs to the Google Cloud Dataflow - * service. + * <p>Caveat: {@code Create} only supports small in-memory datasets. * * @param <T> the type of the elements of the resulting {@code PCollection} */ http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java index dd7e96e..27bb219 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java @@ -162,7 +162,7 @@ import org.apache.beam.sdk.values.TypedPValue; * operations that do not save or restore any state. 
* * @see <a href= - * "https://cloud.google.com/dataflow/java-sdk/applying-transforms" + * "https://beam.apache.org/documentation/programming-guide/#transforms" * >Applying Transformations</a> * * @param <InputT> the type of the input to this PTransform http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java index 9225231..76c06b6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java @@ -436,8 +436,9 @@ import org.apache.beam.sdk.values.TypedPValue; * this modular, composable style, trusting to the runner to * "flatten out" all the compositions into highly optimized stages. * - * @see <a href="https://cloud.google.com/dataflow/model/par-do">the web - * documentation for ParDo</a> + * @see <a href= + * "https://beam.apache.org/documentation/programming-guide/#transforms-pardo"> + * the web documentation for ParDo</a> */ public class ParDo { http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java index 4c93e56..b067ab8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java @@ -30,7 +30,7 @@ public enum TimeDomain { /** * The {@code PROCESSING_TIME} domain corresponds to the current to the current (system) time. 
- * This is advanced during execution of the Dataflow pipeline. + * This is advanced during execution of the pipeline. */ PROCESSING_TIME, http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/core/src/main/proto/README.md ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/proto/README.md b/sdks/java/core/src/main/proto/README.md index 858441b..b6d91df 100644 --- a/sdks/java/core/src/main/proto/README.md +++ b/sdks/java/core/src/main/proto/README.md @@ -17,9 +17,6 @@ under the License. --> -This directory contains the Protocol Buffer messages used in Google Cloud -Dataflow. - ## Protocol Buffers in Apache Beam This directory contains the Protocol Buffer messages used in Apache Beam. http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java index 03e18e6..d195afd 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java @@ -240,7 +240,7 @@ import org.slf4j.LoggerFactory; * <h3>Permissions</h3> * * <p>Permission requirements depend on the {@link PipelineRunner} that is used to execute the - * Dataflow job. Please refer to the documentation of corresponding {@link PipelineRunner}s for + * pipeline. Please refer to the documentation of corresponding {@link PipelineRunner}s for * more details. 
* * <p>Please see <a href="https://cloud.google.com/bigquery/access-control">BigQuery Access Control http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java index 7091e15..2a8de82 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java @@ -69,7 +69,7 @@ import org.slf4j.LoggerFactory; /** * A bounded source and sink for Google Cloud Bigtable. * - * <p>For more information, see the online documentation at + * <p>For more information about Cloud Bigtable, see the online documentation at * <a href="https://cloud.google.com/bigtable/">Google Cloud Bigtable</a>. * * <h3>Reading from Cloud Bigtable</h3> @@ -141,13 +141,13 @@ import org.slf4j.LoggerFactory; * <h3>Experimental</h3> * * <p>This connector for Cloud Bigtable is considered experimental and may break or receive - * backwards-incompatible changes in future versions of the Cloud Dataflow SDK. Cloud Bigtable is + * backwards-incompatible changes in future versions of the Apache Beam SDK. Cloud Bigtable is * in Beta, and thus it may introduce breaking changes in future revisions of its service or APIs. * * <h3>Permissions</h3> * * <p>Permission requirements depend on the {@link PipelineRunner} that is used to execute the - * Dataflow job. Please refer to the documentation of corresponding + * pipeline. Please refer to the documentation of corresponding * {@link PipelineRunner PipelineRunners} for more details. 
*/ @Experimental http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java index d07fd50..400860f 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java @@ -182,7 +182,7 @@ import org.slf4j.LoggerFactory; * * <h3>Permissions</h3> * Permission requirements depend on the {@code PipelineRunner} that is used to execute the - * Dataflow job. Please refer to the documentation of corresponding {@code PipelineRunner}s for + * pipeline. Please refer to the documentation of corresponding {@code PipelineRunner}s for * more details. 
* * <p>Please see <a href="https://cloud.google.com/datastore/docs/activate">Cloud Datastore Sign Up http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java ---------------------------------------------------------------------- diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java index 7e63a7d..e0e9d9b4 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java @@ -46,7 +46,7 @@ public class MapElementsJava8Test implements Serializable { PCollection<Integer> output = pipeline .apply(Create.of(1, 2, 3)) .apply(MapElements - // Note that the type annotation is required (for Java, not for Dataflow) + // Note that the type annotation is required. .via((Integer i) -> i * 2) .withOutputType(new TypeDescriptor<Integer>() {})); @@ -81,7 +81,7 @@ public class MapElementsJava8Test implements Serializable { PCollection<Integer> output = pipeline .apply(Create.of(1, 2, 3)) .apply(MapElements - // Note that the type annotation is required (for Java, not for Dataflow) + // Note that the type annotation is required. 
.via(new Doubler()::doubleIt) .withOutputType(new TypeDescriptor<Integer>() {})); http://git-wip-us.apache.org/repos/asf/beam/blob/e1b339ca/sdks/java/javadoc/overview.html ---------------------------------------------------------------------- diff --git a/sdks/java/javadoc/overview.html b/sdks/java/javadoc/overview.html index 511eeb4..66d4ab6 100644 --- a/sdks/java/javadoc/overview.html +++ b/sdks/java/javadoc/overview.html @@ -22,7 +22,7 @@ <body> <p>The Apache Beam SDK for Java provides a simple and elegant programming model to express your data processing pipelines; - see <a href="https://cloud.google.com/dataflow/">our product page</a> + see the <a href="https://beam.apache.org/">Apache Beam website</a> for more information and getting started instructions.</p> <p>The easiest way to use the Apache Beam SDK for Java is via
