jaketf commented on a change in pull request #11151: [BEAM-9468]  Hl7v2 io
URL: https://github.com/apache/beam/pull/11151#discussion_r404371522
 
 

 ##########
 File path: sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IO.java
 ##########
 @@ -0,0 +1,597 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.io.gcp.healthcare;
+
+import com.google.api.services.healthcare.v1beta1.model.Message;
+import com.google.auto.value.AutoValue;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
+import org.apache.beam.sdk.metrics.Counter;
+import org.apache.beam.sdk.metrics.Metrics;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.util.Sleeper;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionTuple;
+import org.apache.beam.sdk.values.PInput;
+import org.apache.beam.sdk.values.POutput;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.TupleTagList;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Throwables;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * {@link HL7v2IO} provides an API for reading from and writing to the <a
+ * href="https://cloud.google.com/healthcare/docs/concepts/hl7v2">Google Cloud Healthcare HL7v2
+ * API</a>.
+ *
+ * <p>Read
+ *
+ * <p>HL7v2 Messages can be fetched from the HL7v2 store in two ways: Message Fetching and Message
+ * Listing.
+ *
+ * <p>Message Fetching
+ *
+ * <p>Message Fetching with {@link HL7v2IO.Read} supports use cases where you have a {@link
+ * PCollection} of message IDs. This is appropriate for reading the HL7v2 notifications from a
+ * Pub/Sub subscription with {@link PubsubIO#readStrings()} or in cases where you have a manually
+ * prepared list of messages that you need to process (e.g. in a text file read with {@link
+ * org.apache.beam.sdk.io.TextIO}).
+ *
+ * <p>Fetching the message contents from the HL7v2 store based on the {@link PCollection} of message
+ * ID strings produces a {@link HL7v2IO.Read.Result}, on which one can call {@link
+ * Read.Result#getMessages()} to retrieve a {@link PCollection} containing the successfully fetched
+ * {@link HL7v2Message}s and/or {@link Read.Result#getFailedReads()} to retrieve a {@link
+ * PCollection} of {@link HealthcareIOError} containing the message ID that could not be fetched and
+ * the exception; this can be used to write to the dead letter storage system of your choosing. This
+ * error handling is mainly to catch scenarios where the upstream {@link PCollection} contains IDs
+ * that are not valid or are not reachable due to permissions issues.
+ *
+ * <p>Message Listing
+ *
+ * <p>Message Listing with {@link HL7v2IO.ListHL7v2Messages} supports batch use cases where you want
+ * to process all the messages in an HL7v2 store or those matching a filter. @see <a
+ * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list#query-parameters">List
+ * query parameters</a>. This paginates through the results of a <a
+ * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list">Messages.List</a>
+ * call and outputs directly to a {@link PCollection} of {@link HL7v2Message}. In these use cases,
+ * the error handling similar to above is unnecessary because we are listing from the source of
+ * truth; the pipeline should fail transparently if this transform fails to paginate through all the
+ * results.
+ *
+ * <p>Write
+ *
+ * <p>A bounded or unbounded {@link PCollection} of {@link HL7v2Message} can be ingested into an
+ * HL7v2 store using {@link HL7v2IO#ingestMessages(String)}. This will return a {@link
+ * HL7v2IO.Write.Result} on which you can call {@link Write.Result#getFailedInsertsWithErr()} to
+ * retrieve a {@link PCollection} of {@link HealthcareIOError} containing the {@link HL7v2Message}
+ * that failed to be ingested and the exception. This can be used to write to the dead letter
+ * storage system of your choosing.
+ *
+ * <p>Unbounded Example:
+ *
+ * <pre>{@code
+ * PipelineOptions options = ...;
+ * Pipeline p = Pipeline.create(options);
+ *
+ * HL7v2IO.Read.Result readResult = p
+ *   .apply(
+ *     "Read HL7v2 notifications",
+ *     PubsubIO.readStrings().fromSubscription(options.getNotificationSubscription()))
+ *   .apply(HL7v2IO.readAll());
+ *
+ * // Write errors to your favorite dead letter queue (e.g. Pub/Sub, GCS, BigQuery).
+ * readResult.getFailedReads().apply("WriteToDeadLetterQueue", ...);
+ *
+ *
+ * // Go about your happy path transformations.
+ * PCollection<HL7v2Message> out = readResult.getMessages().apply("ProcessFetchedMessages", ...);
+ *
+ * // Write using the Message.Ingest method of the HL7v2 REST API.
+ * out.apply(HL7v2IO.ingestMessages(options.getOutputHL7v2Store()));
+ *
+ * p.run();
+ *
+ * }
+ * </pre>
+ *
+ * <p>Bounded Read Example:
+ *
+ * <pre>{@code
+ * PipelineOptions options = ...;
+ * Pipeline p = Pipeline.create(options);
+ *
+ * PCollection<HL7v2Message> out = p
+ *   .apply(
+ *       "List messages in HL7v2 store with filter",
+ *       new ListHL7v2Messages(
+ *           Collections.singletonList(options.getInputHL7v2Store()), options.getHL7v2Filter()))
+ *    // Go about your happy path transformations.
+ *   .apply("Process HL7v2 Messages", ...);
+ * p.run().waitUntilFinish();
+ * }
+ * </pre>
+ */
+public class HL7v2IO {
+
+  private static Write.Builder write(String hl7v2Store) {
+    return new AutoValue_HL7v2IO_Write.Builder().setHL7v2Store(hl7v2Store);
+  }
+
+  public static Read readAll() {
+    return new Read();
+  }
+
+  /**
+   * Write with the Messages.Ingest method. @see <a
+   * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/ingest">Messages.ingest</a>.
+   *
+   * @param hl7v2Store the HL7v2 store to ingest messages into
+   * @return the write
+   */
+  public static Write ingestMessages(String hl7v2Store) {
+    return write(hl7v2Store).setWriteMethod(Write.WriteMethod.INGEST).build();
+  }
+
+  // TODO add hyper links to this doc string.
+  /**
+   * The Read transform fetches HL7v2 message contents given a {@link PCollection} of message ID
+   * strings.
+   *
+   * <p>These could be sourced from any {@link PCollection} of {@link String}s but the most popular
+   * patterns would be {@link PubsubIO#readStrings()} reading a subscription on an HL7v2 Store's
+   * notification channel topic, or using {@link ListHL7v2Messages} to list HL7v2 message IDs with
+   * an optional filter. @see <a
+   * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list">Messages.list</a>.
+   */
+  public static class Read extends PTransform<PCollection<String>, Read.Result> {
+
+    public Read() {}
+
+    public static class Result implements POutput, PInput {
+      private PCollection<HL7v2Message> messages;
+
+      private PCollection<HealthcareIOError<String>> failedReads;
+      PCollectionTuple pct;
+
+      public static Result of(PCollectionTuple pct) throws IllegalArgumentException {
+        if (pct.getAll()
+            .keySet()
+            .containsAll(TupleTagList.of(OUT).and(DEAD_LETTER).getAll())) {
+          return new Result(pct);
+        } else {
+          throw new IllegalArgumentException(
+              "The PCollection tuple must have the HL7v2IO.Read.OUT "
+                  + "and HL7v2IO.Read.DEAD_LETTER tuple tags");
+        }
+      }
+
+      private Result(PCollectionTuple pct) {
+        this.pct = pct;
+        this.messages = pct.get(OUT).setCoder(new HL7v2MessageCoder());
+        this.failedReads =
+            pct.get(DEAD_LETTER).setCoder(new HealthcareIOErrorCoder<>(StringUtf8Coder.of()));
+      }
+
+      public PCollection<HealthcareIOError<String>> getFailedReads() {
+        return failedReads;
+      }
+
+      public PCollection<HL7v2Message> getMessages() {
+        return messages;
+      }
+
+      @Override
+      public Pipeline getPipeline() {
+        return this.pct.getPipeline();
+      }
+
+      @Override
+      public Map<TupleTag<?>, PValue> expand() {
+        return ImmutableMap.of(OUT, messages);
+      }
+
+      @Override
+      public void finishSpecifyingOutput(
+          String transformName, PInput input, PTransform<?, ?> transform) {}
+    }
+
+    /** The tag for the main output of HL7v2 Messages. */
+    public static final TupleTag<HL7v2Message> OUT = new TupleTag<HL7v2Message>() {};
+    /** The tag for the deadletter output of HL7v2 Messages. */
+    public static final TupleTag<HealthcareIOError<String>> DEAD_LETTER =
+        new TupleTag<HealthcareIOError<String>>() {};
+
+    @Override
+    public Result expand(PCollection<String> input) {
+      return input.apply("Fetch HL7v2 messages", new FetchHL7v2Message());
+    }
+
+    /**
+     * DoFn to fetch a message from a Google Cloud Healthcare HL7v2 store based on its message ID.
+     *
+     * <p>This DoFn consumes a {@link PCollection} of notification {@link String}s from the HL7v2
+     * store, fetches the actual {@link HL7v2Message} object based on the ID in the notification,
+     * and outputs a {@link PCollectionTuple} which contains the output and dead-letter {@link
+     * PCollection}s.
+     *
+     * <p>The {@link PCollectionTuple} output will contain the following {@link PCollection}s:
+     *
+     * <ul>
+     *   <li>{@link HL7v2IO.Read#OUT} - Contains all the {@link HL7v2Message} records successfully
+     *       read from the HL7v2 store.
+     *   <li>{@link HL7v2IO.Read#DEAD_LETTER} - Contains a {@link PCollection} of {@link
+     *       HealthcareIOError} with the message IDs which failed to be fetched from the HL7v2
+     *       store, along with the error message and stacktrace.
+     * </ul>
+     */
+    public static class FetchHL7v2Message extends PTransform<PCollection<String>, Result> {
+
+      /** Instantiates a new FetchHL7v2Message transform. */
+      public FetchHL7v2Message() {}
+
+      @Override
+      public Result expand(PCollection<String> msgIds) {
+        return new Result(
+            msgIds.apply(
+                ParDo.of(new FetchHL7v2Message.HL7v2MessageGetFn())
+                    .withOutputTags(HL7v2IO.Read.OUT, TupleTagList.of(HL7v2IO.Read.DEAD_LETTER))));
+      }
+
+      /** DoFn for fetching messages from the HL7v2 store with error handling. */
+      public static class HL7v2MessageGetFn extends DoFn<String, HL7v2Message> {
+
+        private Counter failedMessageGets =
+            Metrics.counter(FetchHL7v2Message.HL7v2MessageGetFn.class, "failed-message-reads");
+        private static final Logger LOG =
+            LoggerFactory.getLogger(FetchHL7v2Message.HL7v2MessageGetFn.class);
+        private final Counter successfulHL7v2MessageGets =
+            Metrics.counter(
+                FetchHL7v2Message.HL7v2MessageGetFn.class, "successful-hl7v2-message-gets");
+        private HealthcareApiClient client;
+
+        /** Instantiates a new HL7v2MessageGetFn. */
+        HL7v2MessageGetFn() {}
+
+        /**
+         * Instantiate healthcare client.
+         *
+         * @throws IOException if the healthcare client cannot be created
+         */
+        @Setup
+        public void instantiateHealthcareClient() throws IOException {
+          this.client = new HttpHealthcareApiClient();
+        }
+
+        /**
+         * Process element.
+         *
+         * @param context the context
+         */
+        @ProcessElement
+        public void processElement(ProcessContext context) {
+          String msgId = context.element();
+          try {
+            context.output(HL7v2Message.fromModel(fetchMessage(this.client, msgId)));
+          } catch (Exception e) {
+            failedMessageGets.inc();
+            LOG.warn(
+                String.format(
+                    "Error fetching HL7v2 message with ID %s writing to Dead Letter "
+                        + "Queue. Cause: %s Stack Trace: %s",
+                    msgId, e.getMessage(), Throwables.getStackTraceAsString(e)));
+            context.output(HL7v2IO.Read.DEAD_LETTER, HealthcareIOError.of(msgId, e));
+          }
+        }
+
+        private Message fetchMessage(HealthcareApiClient client, String msgId)
+            throws IOException, ParseException, IllegalArgumentException, InterruptedException {
+          long startTime = System.currentTimeMillis();
+
+          com.google.api.services.healthcare.v1beta1.model.Message msg =
 
 Review comment:
   The API only supports [get](https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/get) for an individual resource or [list](https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list) for resources w/ a filter.
   
   One day, if we could request a list of message IDs in a single call, that would be a great optimization!
   @lastomato do you know if HL7 search, or anything else on the API roadmap, could support this?
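   
   For context, here is a minimal sketch of what that per-message fetch looks like today, assuming the generated v1beta1 Java client (`com.google.api.services.healthcare.v1beta1.CloudHealthcare`) exposes the usual per-resource request chain; the class and helper names below are illustrative and not part of this PR:
   
   ```java
   import com.google.api.services.healthcare.v1beta1.CloudHealthcare;
   import com.google.api.services.healthcare.v1beta1.model.Message;
   import java.io.IOException;
   import java.util.ArrayList;
   import java.util.List;
   
   /** Illustrative sketch: with no batch-get endpoint, each message ID costs one HTTP round trip. */
   class PerMessageFetchSketch {
     static List<Message> fetchAll(CloudHealthcare client, List<String> msgNames) throws IOException {
       List<Message> out = new ArrayList<>();
       for (String msgName : msgNames) {
         // msgName is the full resource path:
         // projects/{p}/locations/{l}/datasets/{d}/hl7V2Stores/{s}/messages/{id}
         out.add(
             client
                 .projects()
                 .locations()
                 .datasets()
                 .hl7V2Stores()
                 .messages()
                 .get(msgName)
                 .execute());
       }
       return out;
     }
   }
   ```
   
   A hypothetical batch-get (or an HL7 search that accepts a list of IDs) would collapse that loop into a single request, which is exactly the optimization wished for above.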

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
