Remove some HadoopIO.Read.Bound factory methods and fluent setters; always set key/value at creation
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/b2f495ec Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/b2f495ec Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/b2f495ec Branch: refs/heads/master Commit: b2f495ecb98f4cd67c6676682f848af717e2aeb4 Parents: 2820534 Author: Sean Owen <so...@cloudera.com> Authored: Mon Jul 6 12:23:07 2015 +0100 Committer: Tom White <t...@cloudera.com> Committed: Thu Mar 10 11:15:14 2016 +0000 ---------------------------------------------------------------------- .../com/cloudera/dataflow/hadoop/HadoopIO.java | 29 ++------------------ .../spark/HadoopFileFormatPipelineTest.java | 5 ++-- 2 files changed, 4 insertions(+), 30 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/b2f495ec/runners/spark/src/main/java/com/cloudera/dataflow/hadoop/HadoopIO.java ---------------------------------------------------------------------- diff --git a/runners/spark/src/main/java/com/cloudera/dataflow/hadoop/HadoopIO.java b/runners/spark/src/main/java/com/cloudera/dataflow/hadoop/HadoopIO.java index 5873b9f..803c495 100644 --- a/runners/spark/src/main/java/com/cloudera/dataflow/hadoop/HadoopIO.java +++ b/runners/spark/src/main/java/com/cloudera/dataflow/hadoop/HadoopIO.java @@ -32,21 +32,8 @@ public final class HadoopIO { private Read() { } - public static <K, V> Bound<K, V> from(String filepattern) { - return new Bound<K, V>().from(filepattern); - } - - public static <K, V> Bound<K, V> withFormatClass( - Class<? extends FileInputFormat<K, V>> format) { - return new Bound<K, V>().withFormatClass(format); - } - - public static <K, V> Bound<K, V> withKeyClass(Class<K> key) { - return new Bound<K, V>().withKeyClass(key); - } - - public static <K, V> Bound<K, V> withValueClass(Class<V> value) { - return new Bound<K, V>().withValueClass(value); + public static <K, V> Bound<K, V> withKeyValueClass(Class<K> key, Class<V> value) { + return new Bound<>(null, null, key, value); } public static class Bound<K, V> extends PTransform<PInput, PCollection<KV<K, V>>> { @@ -56,10 +43,6 @@ public final class HadoopIO { private final Class<K> keyClass; private final Class<V> valueClass; - Bound() { - this(null, null, null, null); - } - Bound(String filepattern, Class<? extends FileInputFormat<K, V>> format, Class<K> key, Class<V> value) { this.filepattern = filepattern; @@ -76,14 +59,6 @@ public final class HadoopIO { return new Bound<>(filepattern, format, keyClass, valueClass); } - public Bound<K, V> withKeyClass(Class<K> key) { - return new Bound<>(filepattern, formatClass, key, valueClass); - } - - public Bound<K, V> withValueClass(Class<V> value) { - return new Bound<>(filepattern, formatClass, keyClass, value); - } - public String getFilepattern() { return filepattern; } http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/b2f495ec/runners/spark/src/test/java/com/cloudera/dataflow/spark/HadoopFileFormatPipelineTest.java ---------------------------------------------------------------------- diff --git a/runners/spark/src/test/java/com/cloudera/dataflow/spark/HadoopFileFormatPipelineTest.java b/runners/spark/src/test/java/com/cloudera/dataflow/spark/HadoopFileFormatPipelineTest.java index 1fd8e41..127d58f 100644 --- a/runners/spark/src/test/java/com/cloudera/dataflow/spark/HadoopFileFormatPipelineTest.java +++ b/runners/spark/src/test/java/com/cloudera/dataflow/spark/HadoopFileFormatPipelineTest.java @@ -68,9 +68,8 @@ public class HadoopFileFormatPipelineTest { Class<? extends FileInputFormat<IntWritable, Text>> inputFormatClass = (Class<? extends FileInputFormat<IntWritable, Text>>) (Class<?>) SequenceFileInputFormat.class; HadoopIO.Read.Bound<IntWritable,Text> bound = - HadoopIO.Read.<IntWritable,Text>from(inputFile.getAbsolutePath()) - .withKeyClass(IntWritable.class) - .withValueClass(Text.class) + HadoopIO.Read.withKeyValueClass(IntWritable.class, Text.class). from(inputFile.getAbsolutePath()) .withFormatClass(inputFormatClass); PCollection<KV<IntWritable, Text>> input = p.apply(bound); input.apply(ParDo.of(new TabSeparatedString()))