Lizzfox commented on code in PR #17828:
URL: https://github.com/apache/beam/pull/17828#discussion_r958733870


##########
sdks/java/io/sparkreceiver/src/main/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIO.java:
##########
@@ -0,0 +1,162 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.io.sparkreceiver;
+
+import static org.apache.beam.sdk.util.Preconditions.checkStateNotNull;
+import static 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
+
+import com.google.auto.value.AutoValue;
+import org.apache.beam.sdk.transforms.Impulse;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.spark.streaming.receiver.Receiver;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Streaming sources for Spark {@link Receiver}.
+ *
+ * <h3>Reading using {@link SparkReceiverIO}</h3>
+ *
+ * <p>You will need to pass a {@link ReceiverBuilder} which is responsible for 
instantiating new
+ * {@link Receiver} objects.
+ *
+ * <p>{@link Receiver} that will be used should implement {@link HasOffset} 
interface. You will need
+ * to pass {@code getOffsetFn} which is a {@link SerializableFunction} that 
defines how to get
+ * {@code Long offset} from {@code V record}.
+ *
+ * <p>Optionally you can pass {@code watermarkFn} which is a {@link 
SerializableFunction} that
+ * defines how to get {@code Instant watermark} from {@code V record}.
+ *
+ * <p>Example of {@link SparkReceiverIO#read()} usage:
+ *
+ * <pre>{@code
+ * Pipeline p = ...; // Create pipeline.
+ *
+ * // Create ReceiverBuilder for CustomReceiver
+ * ReceiverBuilder<String, CustomReceiver> receiverBuilder =
+ *         new ReceiverBuilder<>(CustomReceiver.class).withConstructorArgs();
+ *
+ * // Read from CustomReceiver
+ * p.apply("Spark Receiver Read",
+ *     SparkReceiverIO.<String>read()
+ *         .withGetOffsetFn(Long::valueOf)
+ *         .withWatermarkFn(Instant::parse)
+ *         .withSparkReceiverBuilder(receiverBuilder));
+ * }</pre>
+ */
+public class SparkReceiverIO {
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(SparkReceiverIO.class);
+
  /**
   * Creates an uninitialized {@link Read} transform.
   *
   * <p>Before use, the returned transform must be configured with at least {@code
   * withSparkReceiverBuilder(...)} and {@code withGetOffsetFn(...)}; see {@link
   * Read#validateTransform()}.
   */
  public static <V> Read<V> read() {
    return new AutoValue_SparkReceiverIO_Read.Builder<V>().build();
  }
+
  /**
   * A {@link PTransform} to read from Spark {@link Receiver}.
   *
   * <p>Configured via the {@code with*} methods below; {@code withSparkReceiverBuilder} and
   * {@code withGetOffsetFn} are required, {@code withWatermarkFn} is optional (see
   * {@link #validateTransform()}).
   */
  @AutoValue
  @AutoValue.CopyAnnotations
  public abstract static class Read<V> extends PTransform<PBegin, PCollection<V>> {

    // Builder used to instantiate the user's custom Spark Receiver. Required; nullable only
    // until withSparkReceiverBuilder(...) has been called.
    abstract @Nullable ReceiverBuilder<V, ? extends Receiver<V>> getSparkReceiverBuilder();

    // Function extracting a Long offset from a record. Required; nullable only until
    // withGetOffsetFn(...) has been called.
    abstract @Nullable SerializableFunction<V, Long> getGetOffsetFn();

    // Function extracting a watermark Instant from a record. Optional; may stay null.
    abstract @Nullable SerializableFunction<V, Instant> getWatermarkFn();

    abstract Builder<V> toBuilder();

    /** AutoValue builder backing the immutable {@code with*} methods. */
    @AutoValue.Builder
    abstract static class Builder<V> {

      abstract Builder<V> setSparkReceiverBuilder(
          ReceiverBuilder<V, ? extends Receiver<V>> sparkReceiverBuilder);

      abstract Builder<V> setGetOffsetFn(SerializableFunction<V, Long> getOffsetFn);

      abstract Builder<V> setWatermarkFn(SerializableFunction<V, Instant> watermarkFn);

      abstract Read<V> build();
    }

    /**
     * Sets {@link ReceiverBuilder} with value and custom Spark {@link Receiver} class.
     *
     * @param sparkReceiverBuilder builder producing the {@link Receiver} to read from; must not
     *     be null
     * @return a new {@link Read} with the builder set (this instance is unchanged)
     * @throws IllegalArgumentException if {@code sparkReceiverBuilder} is null
     */
    public Read<V> withSparkReceiverBuilder(
        ReceiverBuilder<V, ? extends Receiver<V>> sparkReceiverBuilder) {
      checkArgument(sparkReceiverBuilder != null, "Spark receiver builder can not be null");
      return toBuilder().setSparkReceiverBuilder(sparkReceiverBuilder).build();
    }

    /**
     * A function to get offset in order to start {@link Receiver} from it.
     *
     * @param getOffsetFn function mapping a record to its {@code Long} offset; must not be null
     * @return a new {@link Read} with the function set (this instance is unchanged)
     * @throws IllegalArgumentException if {@code getOffsetFn} is null
     */
    public Read<V> withGetOffsetFn(SerializableFunction<V, Long> getOffsetFn) {
      checkArgument(getOffsetFn != null, "Get offset function can not be null");
      return toBuilder().setGetOffsetFn(getOffsetFn).build();
    }

    /**
     * A function to calculate watermark after a record.
     *
     * @param watermarkFn function mapping a record to an {@link Instant} watermark; must not be
     *     null
     * @return a new {@link Read} with the function set (this instance is unchanged)
     * @throws IllegalArgumentException if {@code watermarkFn} is null
     */
    public Read<V> withWatermarkFn(SerializableFunction<V, Instant> watermarkFn) {
      checkArgument(watermarkFn != null, "Watermark function can not be null");
      return toBuilder().setWatermarkFn(watermarkFn).build();
    }

    @Override
    public PCollection<V> expand(PBegin input) {
      // Fail fast on missing required options before delegating to the SDF-based read.
      validateTransform();
      return input.apply(new ReadFromSparkReceiverViaSdf<>(this));
    }

    /**
     * Checks that the required options were supplied.
     *
     * @throws IllegalStateException if {@code withSparkReceiverBuilder(...)} or {@code
     *     withGetOffsetFn(...)} was not called
     */
    public void validateTransform() {
      ReceiverBuilder<V, ? extends Receiver<V>> sparkReceiverBuilder = getSparkReceiverBuilder();
      checkStateNotNull(sparkReceiverBuilder, "withSparkReceiverBuilder() is required");
      checkStateNotNull(getGetOffsetFn(), "withGetOffsetFn() is required");
    }
  }
+
+  static class ReadFromSparkReceiverViaSdf<V> extends PTransform<PBegin, 
PCollection<V>> {
+
    // Fully configured Read transform whose options drive this SDF-based expansion.
    private final Read<V> sparkReceiverRead;

    /** Wraps the given (already validated) {@link Read} configuration. */
    ReadFromSparkReceiverViaSdf(Read<V> sparkReceiverRead) {
      this.sparkReceiverRead = sparkReceiverRead;
    }
+
+    @Override
+    public PCollection<V> expand(PBegin input) {
+      final ReceiverBuilder<V, ? extends Receiver<V>> sparkReceiverBuilder =
+          sparkReceiverRead.getSparkReceiverBuilder();
+      checkStateNotNull(sparkReceiverBuilder, "withSparkReceiverBuilder() is 
required");
+      if 
(!HasOffset.class.isAssignableFrom(sparkReceiverBuilder.getSparkReceiverClass()))
 {
+        throw new UnsupportedOperationException(
+            String.format(
+                "Given Spark Receiver class %s doesn't implement HasOffset 
interface,"
+                    + " therefore it is not supported!",
+                sparkReceiverBuilder.getSparkReceiverClass().getName()));
+      } else {
+        LOG.info("{} started reading", 
ReadFromSparkReceiverWithOffsetDoFn.class.getSimpleName());
+        return input
+            .apply(Impulse.create())
+            .apply(ParDo.of(new 
ReadFromSparkReceiverWithOffsetDoFn<>(sparkReceiverRead)));

Review Comment:
   Could you please merge this implementation as a starting point, to unblock the 
work on the PRs with Spark Receiver integration tests and the integration with Cdap IO?
   While we haven't yet found a way to split the Spark receivers, we continue to 
work in this direction.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to