Repository: incubator-beam
Updated Branches:
  refs/heads/master 4f580f5f1 -> 9abd0926a


Remove many definitions of named() methods

Specifically, remove the occurrences in:
  - Window
  - AvroIO
  - PubsubIO
  - TextIO
  - BigQueryIO
  - Read
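
The step name is now supplied through apply(String, PTransform) instead, as the
updated Javadoc examples below show. A minimal before/after sketch using TextIO
(the class name and bucket paths here are placeholders):

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.TextIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.values.PCollection;

    public class NamedStepMigration {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        // Before this change, the step name rode on the transform itself:
        //   p.apply(TextIO.Read.named("ReadLines")
        //                      .from("gs://my_bucket/path/to/input-*.txt"));

        // After this change, the name is passed to apply() instead:
        PCollection<String> lines =
            p.apply("ReadLines", TextIO.Read.from("gs://my_bucket/path/to/input-*.txt"));

        // The same pattern names a write step.
        lines.apply("WriteLines", TextIO.Write.to("gs://my_bucket/path/to/output"));

        p.run();
      }
    }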


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/fc52a102
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/fc52a102
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/fc52a102

Branch: refs/heads/master
Commit: fc52a10259cd045f4b55ec59b2ae87c02c926ed4
Parents: 5719535
Author: Ben Chambers <bchamb...@google.com>
Authored: Thu Jun 23 17:55:24 2016 -0700
Committer: Ben Chambers <bchamb...@google.com>
Committed: Sun Jun 26 10:06:35 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/sdk/io/AvroIO.java     | 53 ++++---------------
 .../java/org/apache/beam/sdk/io/BigQueryIO.java | 42 +--------------
 .../java/org/apache/beam/sdk/io/PubsubIO.java   | 35 +------------
 .../main/java/org/apache/beam/sdk/io/Read.java  | 29 +----------
 .../java/org/apache/beam/sdk/io/TextIO.java     | 55 ++++----------------
 .../org/apache/beam/sdk/io/package-info.java    |  6 +--
 .../beam/sdk/transforms/windowing/Window.java   | 42 ---------------
 7 files changed, 25 insertions(+), 237 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fc52a102/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
index 4b40c01..604051b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
@@ -55,9 +55,7 @@ import javax.annotation.Nullable;
  * {@link AvroIO.Read}, specifying {@link AvroIO.Read#from} to specify
  * the path of the file(s) to read from (e.g., a local filename or
  * filename pattern if running locally, or a Google Cloud Storage
- * filename or filename pattern of the form
- * {@code "gs://<bucket>/<filepath>"}), and optionally
- * {@link AvroIO.Read#named} to specify the name of the pipeline step.
+ * filename or filename pattern of the form {@code "gs://<bucket>/<filepath>"}).
  *
  * <p>It is required to specify {@link AvroIO.Read#withSchema}. To
  * read specific records, such as Avro-generated classes, provide an
@@ -73,15 +71,15 @@ import javax.annotation.Nullable;
  * // A simple Read of a local file (only runs locally):
  * PCollection<AvroAutoGenClass> records =
  *     p.apply(AvroIO.Read.from("/path/to/file.avro")
- *                        .withSchema(AvroAutoGenClass.class));
+ *                 .withSchema(AvroAutoGenClass.class));
  *
  * // A Read from a GCS file (runs locally and via the Google Cloud
  * // Dataflow service):
  * Schema schema = new Schema.Parser().parse(new File("schema.avsc"));
  * PCollection<GenericRecord> records =
- *     p.apply(AvroIO.Read.named("ReadFromAvro")
- *                        .from("gs://my_bucket/path/to/records-*.avro")
- *                        .withSchema(schema));
+ *     p.apply(AvroIO.Read
+ *                .from("gs://my_bucket/path/to/records-*.avro")
+ *                .withSchema(schema));
  * } </pre>
  *
  * <p>To write a {@link PCollection} to one or more Avro files, use
@@ -110,10 +108,10 @@ import javax.annotation.Nullable;
  * // Dataflow service):
  * Schema schema = new Schema.Parser().parse(new File("schema.avsc"));
  * PCollection<GenericRecord> records = ...;
- * records.apply(AvroIO.Write.named("WriteToAvro")
- *                           .to("gs://my_bucket/path/to/numbers")
- *                           .withSchema(schema)
- *                           .withSuffix(".avro"));
+ * records.apply("WriteToAvro", AvroIO.Write
+ *     .to("gs://my_bucket/path/to/numbers")
+ *     .withSchema(schema)
+ *     .withSuffix(".avro"));
  * } </pre>
  *
  * <p><h3>Permissions</h3>
@@ -128,12 +126,6 @@ public class AvroIO {
    * the decoding of each record.
    */
   public static class Read {
-    /**
-     * Returns a {@link PTransform} with the given step name.
-     */
-    public static Bound<GenericRecord> named(String name) {
-      return new Bound<>(GenericRecord.class).named(name);
-    }
 
     /**
      * Returns a {@link PTransform} that reads from the file(s)
@@ -223,16 +215,6 @@ public class AvroIO {
 
       /**
        * Returns a new {@link PTransform} that's like this one but
-       * with the given step name.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound<T> named(String name) {
-        return new Bound<>(name, filepattern, type, schema, validate);
-      }
-
-      /**
-       * Returns a new {@link PTransform} that's like this one but
        * that reads from the file(s) with the given name or pattern.
        * (See {@link AvroIO.Read#from} for a description of
        * filepatterns.)
@@ -366,12 +348,6 @@ public class AvroIO {
    * multiple Avro files matching a sharding pattern).
    */
   public static class Write {
-    /**
-     * Returns a {@link PTransform} with the given step name.
-     */
-    public static Bound<GenericRecord> named(String name) {
-      return new Bound<>(GenericRecord.class).named(name);
-    }
 
     /**
      * Returns a {@link PTransform} that writes to the file(s)
@@ -522,17 +498,6 @@ public class AvroIO {
 
       /**
        * Returns a new {@link PTransform} that's like this one but
-       * with the given step name.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound<T> named(String name) {
-        return new Bound<>(
-            name, filenamePrefix, filenameSuffix, numShards, shardTemplate, type, schema, validate);
-      }
-
-      /**
-       * Returns a new {@link PTransform} that's like this one but
        * that writes to the file(s) with the given filename prefix.
        *
        * <p>See {@link AvroIO.Write#to(String)} for more information

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fc52a102/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BigQueryIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BigQueryIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BigQueryIO.java
index 7cac705..a9d85b8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BigQueryIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BigQueryIO.java
@@ -162,8 +162,7 @@ import javax.annotation.Nullable;
  * This produces a {@link PCollection} of {@link TableRow TableRows} as output:
  * <pre>{@code
  * PCollection<TableRow> shakespeare = pipeline.apply(
- *     BigQueryIO.Read.named("Read")
- *                    .from("clouddataflow-readonly:samples.weather_stations"));
+ *     BigQueryIO.Read.from("clouddataflow-readonly:samples.weather_stations"));
  * }</pre>
  *
  * <p>See {@link TableRow} for more information on the {@link TableRow} object.
@@ -174,8 +173,7 @@ import javax.annotation.Nullable;
  *
  * <pre>{@code
  * PCollection<TableRow> shakespeare = pipeline.apply(
- *     BigQueryIO.Read.named("Read")
- *                    .fromQuery("SELECT year, mean_temp FROM samples.weather_stations"));
+ *     BigQueryIO.Read.fromQuery("SELECT year, mean_temp FROM samples.weather_stations"));
  * }</pre>
  *
 * <p>When creating a BigQuery input transform, users should provide either a query or a table.
@@ -193,7 +191,6 @@ import javax.annotation.Nullable;
  * TableSchema schema = new TableSchema().setFields(fields);
  *
  * quotes.apply(BigQueryIO.Write
- *     .named("Write")
  *     .to("my-project:output.output_table")
  *     .withSchema(schema)
 *     .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
@@ -214,7 +211,6 @@ import javax.annotation.Nullable;
  * PCollection<TableRow> quotes = ...
  * quotes.apply(Window.<TableRow>into(CalendarWindows.days(1)))
  *       .apply(BigQueryIO.Write
- *         .named("Write")
  *         .withSchema(schema)
  *         .to(new SerializableFunction<BoundedWindow, String>() {
  *           public String apply(BoundedWindow window) {
@@ -345,13 +341,6 @@ public class BigQueryIO {
    * }}</pre>
    */
   public static class Read {
-    /**
-     * Returns a {@link Read.Bound} with the given name. The BigQuery table or query to be read
-     * from has not yet been configured.
-     */
-    public static Bound named(String name) {
-      return new Bound().named(name);
-    }
 
     /**
     * Reads a BigQuery table specified as {@code "[project_id]:[dataset_id].[table_id]"} or
@@ -429,15 +418,6 @@ public class BigQueryIO {
       }
 
       /**
-       * Returns a copy of this transform using the name associated with this transformation.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound named(String name) {
-        return new Bound(name, query, jsonTableRef, validate, flattenResults, testBigQueryServices);
-      }
-
-      /**
       * Returns a copy of this transform that reads from the specified table. Refer to
        * {@link #parseTableSpec(String)} for the specification format.
        *
@@ -1372,14 +1352,6 @@ public class BigQueryIO {
     }
 
     /**
-     * Creates a write transformation with the given transform name. The BigQuery table to be
-     * written has not yet been configured.
-     */
-    public static Bound named(String name) {
-      return new Bound().named(name);
-    }
-
-    /**
      * Creates a write transformation for the given table specification.
      *
     * <p>Refer to {@link #parseTableSpec(String)} for the specification format.
@@ -1522,16 +1494,6 @@ public class BigQueryIO {
       }
 
       /**
-       * Returns a copy of this write transformation, but with the specified transform name.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound named(String name) {
-        return new Bound(name, jsonTableRef, tableRefFunction, jsonSchema, createDisposition,
-            writeDisposition, validate, testBigQueryServices);
-      }
-
-      /**
       * Returns a copy of this write transformation, but writing to the specified table. Refer to
        * {@link #parseTableSpec(String)} for the specification format.
        *
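
With BigQueryIO.Read.named(...) and BigQueryIO.Write.named(...) removed, the step
name goes to apply() instead. A minimal sketch reusing the sample table from the
Javadoc above (the class and step names are placeholders):

    import com.google.api.services.bigquery.model.TableRow;
    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.BigQueryIO;
    import org.apache.beam.sdk.values.PCollection;

    class BigQueryNaming {
      static PCollection<TableRow> readWeather(Pipeline pipeline) {
        // "ReadWeatherStations" replaces the removed .named("Read") call.
        return pipeline.apply("ReadWeatherStations",
            BigQueryIO.Read.from("clouddataflow-readonly:samples.weather_stations"));
      }
    }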

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fc52a102/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
index c6de8b4..ecb1f0a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
@@ -55,6 +55,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+
 import javax.annotation.Nullable;
 
 /**
@@ -369,13 +370,6 @@ public class PubsubIO {
   * {@link Bound#maxNumRecords(int)} or {@link Bound#maxReadTime(Duration)} must be set.
    */
   public static class Read {
-    /**
-     * Creates and returns a transform for reading from Cloud Pub/Sub with the specified transform
-     * name.
-     */
-    public static Bound<String> named(String name) {
-      return new Bound<>(DEFAULT_PUBSUB_CODER).named(name);
-    }
 
     /**
     * Creates and returns a transform for reading from a Cloud Pub/Sub topic. Mutually exclusive
@@ -531,16 +525,6 @@ public class PubsubIO {
       }
 
       /**
-       * Returns a transform that's like this one but with the given step name.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound<T> named(String name) {
-        return new Bound<>(
-            name, subscription, topic, timestampLabel, coder, idLabel, maxNumRecords, maxReadTime);
-      }
-
-      /**
        * Returns a transform that's like this one but reading from the
        * given subscription.
        *
@@ -834,13 +818,6 @@ public class PubsubIO {
   // TODO: Support non-String encodings.
   public static class Write {
     /**
-     * Creates a transform that writes to Pub/Sub with the given step name.
-     */
-    public static Bound<String> named(String name) {
-      return new Bound<>(DEFAULT_PUBSUB_CODER).named(name);
-    }
-
-    /**
      * Creates a transform that publishes to the specified topic.
      *
       * <p>See {@link PubsubIO.PubsubTopic#fromPath(String)} for more details on the format of the
@@ -917,16 +894,6 @@ public class PubsubIO {
       }
 
       /**
-       * Returns a new transform that's like this one but with the specified step
-       * name.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound<T> named(String name) {
-        return new Bound<>(name, topic, timestampLabel, idLabel, coder);
-      }
-
-      /**
        * Returns a new transform that's like this one but that writes to the specified
        * topic.
        *
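
With PubsubIO.Read.named(...) and PubsubIO.Write.named(...) removed, the step name
is likewise passed to apply(). A minimal sketch; the topic path and all names below
are placeholders:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.PubsubIO;
    import org.apache.beam.sdk.values.PCollection;

    class PubsubNaming {
      static PCollection<String> readMessages(Pipeline p) {
        // "ReadFromPubsub" replaces the removed .named(...) step name.
        return p.apply("ReadFromPubsub",
            PubsubIO.Read.topic("projects/my-project/topics/my-topic"));
      }
    }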

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fc52a102/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java
index c0440f2..e13ff06 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java
@@ -38,17 +38,10 @@ import javax.annotation.Nullable;
  * <p>Usage example:
  * <pre>
  * Pipeline p = Pipeline.create();
- * p.apply(Read.from(new MySource().withFoo("foo").withBar("bar"))
- *             .named("foobar"));
+ * p.apply(Read.from(new MySource().withFoo("foo").withBar("bar")));
  * </pre>
  */
 public class Read {
-  /**
-   * Returns a new {@code Read} {@code PTransform} builder with the given name.
-   */
-  public static Builder named(String name) {
-    return new Builder(name);
-  }
 
   /**
   * Returns a new {@code Read.Bounded} {@code PTransform} reading from the given
@@ -104,16 +97,6 @@ public class Read {
       this.source = SerializableUtils.ensureSerializable(source);
     }
 
-    /**
-     * Returns a new {@code Bounded} {@code PTransform} that's like this one but
-     * has the given name.
-     *
-     * <p>Does not modify this object.
-     */
-    public Bounded<T> named(String name) {
-      return new Bounded<T>(name, source);
-    }
-
     @Override
     protected Coder<T> getDefaultOutputCoder() {
       return source.getDefaultOutputCoder();
@@ -162,16 +145,6 @@ public class Read {
     }
 
     /**
-     * Returns a new {@code Unbounded} {@code PTransform} that's like this one but
-     * has the given name.
-     *
-     * <p>Does not modify this object.
-     */
-    public Unbounded<T> named(String name) {
-      return new Unbounded<T>(name, source);
-    }
-
-    /**
      * Returns a new {@link BoundedReadFromUnboundedSource} that reads a bounded amount
      * of data from the given {@link UnboundedSource}.  The bound is specified as a number
      * of records to read.
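
With Read.named(...) and the per-transform named(...) methods removed here as well,
a source read gets its step name from apply(). A minimal sketch, assuming
CountingSource.upTo from this SDK is available (the step and class names are
placeholders):

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.CountingSource;
    import org.apache.beam.sdk.io.Read;
    import org.apache.beam.sdk.values.PCollection;

    class ReadNaming {
      static PCollection<Long> readCounts(Pipeline p) {
        // "ReadCounts" replaces the removed Read.named("ReadCounts") builder.
        return p.apply("ReadCounts", Read.from(CountingSource.upTo(1000)));
      }
    }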

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fc52a102/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java
index a7e5e29..7e7a3e6 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java
@@ -58,8 +58,7 @@ import javax.annotation.Nullable;
  * the path of the file(s) to read from (e.g., a local filename or
  * filename pattern if running locally, or a Google Cloud Storage
  * filename or filename pattern of the form
- * {@code "gs://<bucket>/<filepath>"}). You may optionally call
- * {@link TextIO.Read#named(String)} to specify the name of the pipeline step.
+ * {@code "gs://<bucket>/<filepath>"}).
  *
 * <p>By default, {@link TextIO.Read} returns a {@link PCollection} of {@link String Strings},
 * each corresponding to one line of an input UTF-8 text file. To convert directly from the raw
@@ -78,9 +77,9 @@ import javax.annotation.Nullable;
  * // A fully-specified Read from a GCS file (runs locally and via the
  * // Google Cloud Dataflow service):
  * PCollection<Integer> numbers =
- *     p.apply(TextIO.Read.named("ReadNumbers")
- *                        .from("gs://my_bucket/path/to/numbers-*.txt")
- *                        .withCoder(TextualIntegerCoder.of()));
+ *     p.apply("ReadNumbers", TextIO.Read
+ *         .from("gs://my_bucket/path/to/numbers-*.txt")
+ *         .withCoder(TextualIntegerCoder.of()));
  * }</pre>
  *
  * <p>To write a {@link PCollection} to one or more text files, use
@@ -88,9 +87,8 @@ import javax.annotation.Nullable;
  * the path of the file to write to (e.g., a local filename or sharded
  * filename pattern if running locally, or a Google Cloud Storage
  * filename or sharded filename pattern of the form
- * {@code "gs://<bucket>/<filepath>"}). You can optionally name the resulting 
transform using
- * {@link TextIO.Write#named(String)}, and you can use {@link 
TextIO.Write#withCoder(Coder)}
- * to specify the Coder to use to encode the Java values into text lines.
+ * {@code "gs://<bucket>/<filepath>"}). You can use {@link 
TextIO.Write#withCoder(Coder)}
+ * to specify the {@link Coder} to use to encode the Java values into text 
lines.
  *
  * <p>Any existing files with the same names as generated output files
  * will be overwritten.
@@ -104,10 +102,10 @@ import javax.annotation.Nullable;
  * // A fully-specified Write to a sharded GCS file (runs locally and via the
  * // Google Cloud Dataflow service):
  * PCollection<Integer> numbers = ...;
- * numbers.apply(TextIO.Write.named("WriteNumbers")
- *                           .to("gs://my_bucket/path/to/numbers")
- *                           .withSuffix(".txt")
- *                           .withCoder(TextualIntegerCoder.of()));
+ * numbers.apply("WriteNumbers", TextIO.Write
+ *      .to("gs://my_bucket/path/to/numbers")
+ *      .withSuffix(".txt")
+ *      .withCoder(TextualIntegerCoder.of()));
  * }</pre>
  *
  * <h3>Permissions</h3>
@@ -130,12 +128,6 @@ public class TextIO {
    * {@link #withCoder(Coder)} to change the return type.
    */
   public static class Read {
-    /**
-     * Returns a transform for reading text files that uses the given step name.
-     */
-    public static Bound<String> named(String name) {
-      return new Bound<>(DEFAULT_TEXT_CODER).named(name);
-    }
 
     /**
      * Returns a transform for reading text files that reads from the file(s)
@@ -228,16 +220,6 @@ public class TextIO {
 
       /**
       * Returns a new transform for reading from text files that's like this one but
-       * with the given step name.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound<T> named(String name) {
-        return new Bound<>(name, filepattern, coder, validate, compressionType);
-      }
-
-      /**
-       * Returns a new transform for reading from text files that's like this one but
        * that reads from the file(s) with the given name or pattern. See {@link TextIO.Read#from}
        * for a description of filepatterns.
        *
@@ -387,12 +369,6 @@ public class TextIO {
    * element of the input collection encoded into its own line.
    */
   public static class Write {
-    /**
-     * Returns a transform for writing to text files with the given step name.
-     */
-    public static Bound<String> named(String name) {
-      return new Bound<>(DEFAULT_TEXT_CODER).named(name);
-    }
 
     /**
      * Returns a transform for writing to text files that writes to the file(s)
@@ -521,17 +497,6 @@ public class TextIO {
 
       /**
        * Returns a transform for writing to text files that's like this one but
-       * with the given step name.
-       *
-       * <p>Does not modify this object.
-       */
-      public Bound<T> named(String name) {
-        return new Bound<>(name, filenamePrefix, filenameSuffix, coder, numShards,
-            shardTemplate, validate);
-      }
-
-      /**
-       * Returns a transform for writing to text files that's like this one but
        * that writes to the file(s) with the given filename prefix.
        *
       * <p>See {@link TextIO.Write#to(String) Write.to(String)} for more information.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fc52a102/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
index c2c0685..432c5df 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
@@ -25,14 +25,12 @@
  * from existing storage:
  * <pre>{@code
  * PCollection<TableRow> inputData = pipeline.apply(
- *     BigQueryIO.Read.named("Read")
- *                    .from("clouddataflow-readonly:samples.weather_stations");
+ *     BigQueryIO.Read.from("clouddataflow-readonly:samples.weather_stations"));
  * }</pre>
  * and {@code Write} transforms that persist PCollections to external storage:
  * <pre> {@code
  * PCollection<Integer> numbers = ...;
- * numbers.apply(TextIO.Write.named("WriteNumbers")
- *                           .to("gs://my_bucket/path/to/numbers"));
+ * numbers.apply(TextIO.Write.to("gs://my_bucket/path/to/numbers"));
  * } </pre>
  */
 package org.apache.beam.sdk.io;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fc52a102/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
index dde5c05..bc122e2 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
@@ -159,21 +159,6 @@ public class Window {
   }
 
   /**
-   * Creates a {@code Window} {@code PTransform} with the given name.
-   *
-   * <p>See the discussion of Naming in
-   * {@link org.apache.beam.sdk.transforms.ParDo} for more explanation.
-   *
-   * <p>The resulting {@code PTransform} is incomplete, and its input/output
-   * type is not yet bound.  Use {@link Window.Unbound#into} to specify the
-   * {@link WindowFn} to use, which will also bind the input/output type of this
-   * {@code PTransform}.
-   */
-  public static Unbound named(String name) {
-    return new Unbound().named(name);
-  }
-
-  /**
    * Creates a {@code Window} {@code PTransform} that uses the given
    * {@link WindowFn} to window the data.
    *
@@ -255,19 +240,6 @@ public class Window {
     }
 
     /**
-     * Returns a new {@code Window} transform that's like this
-     * transform but with the specified name.  Does not modify this
-     * transform.  The resulting transform is still incomplete.
-     *
-     * <p>See the discussion of Naming in
-     * {@link org.apache.beam.sdk.transforms.ParDo} for more
-     * explanation.
-     */
-    public Unbound named(String name) {
-      return new Unbound(name);
-    }
-
-    /**
      * Returns a new {@code Window} {@code PTransform} that's like this
      * transform but that will use the given {@link WindowFn}, and that has
      * its input and output types bound.  Does not modify this transform.  The
@@ -408,20 +380,6 @@ public class Window {
     }
 
     /**
-     * Returns a new {@code Window} {@code PTransform} that's like this
-     * {@code PTransform} but with the specified name.  Does not
-     * modify this {@code PTransform}.
-     *
-     * <p>See the discussion of Naming in
-     * {@link org.apache.beam.sdk.transforms.ParDo} for more
-     * explanation.
-     */
-    public Bound<T> named(String name) {
-      return new Bound<>(
-          name, windowFn, trigger, mode, allowedLateness, closingBehavior, outputTimeFn);
-    }
-
-    /**
      * Sets a non-default trigger for this {@code Window} {@code PTransform}.
      * Elements that are assigned to a specific window will be output when
      * the trigger fires.
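
With Window.named(...) gone, a windowing step can still carry an explicit name via
apply(). A minimal sketch using fixed one-minute windows (the collection, class, and
step names are illustrative):

    import org.apache.beam.sdk.transforms.windowing.FixedWindows;
    import org.apache.beam.sdk.transforms.windowing.Window;
    import org.apache.beam.sdk.values.PCollection;
    import org.joda.time.Duration;

    class WindowNaming {
      static PCollection<String> windowByMinute(PCollection<String> events) {
        // "WindowIntoFixedWindows" replaces the removed Window.named(...) call.
        return events.apply("WindowIntoFixedWindows",
            Window.<String>into(FixedWindows.of(Duration.standardMinutes(1))));
      }
    }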
