jaketf commented on a change in pull request #11151: [BEAM-9468]  Hl7v2 io
URL: https://github.com/apache/beam/pull/11151#discussion_r404368939
 
 

 ##########
 File path: 
sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HL7v2IO.java
 ##########
 @@ -0,0 +1,597 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.io.gcp.healthcare;
+
+import com.google.api.services.healthcare.v1beta1.model.Message;
+import com.google.auto.value.AutoValue;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
+import org.apache.beam.sdk.metrics.Counter;
+import org.apache.beam.sdk.metrics.Metrics;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.util.Sleeper;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionTuple;
+import org.apache.beam.sdk.values.PInput;
+import org.apache.beam.sdk.values.POutput;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.TupleTagList;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Throwables;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * {@link HL7v2IO} provides an API for reading from and writing to the <a
+ * href="https://cloud.google.com/healthcare/docs/concepts/hl7v2">Google Cloud Healthcare HL7v2
+ * API</a>.
+ *
+ * <p>Read
+ *
+ * <p>HL7v2 Messages can be fetched from the HL7v2 store in two ways: Message Fetching and
+ * Message Listing.
+ *
+ * <p>Message Fetching
+ *
+ * <p>Message Fetching with {@link HL7v2IO.Read} supports use cases where you have a {@link
+ * PCollection} of message IDs. This is appropriate for reading HL7v2 notifications from a
+ * Pub/Sub subscription with {@link PubsubIO#readStrings()} or in cases where you have a manually
+ * prepared list of message IDs that you need to process (e.g. in a text file read with {@link
+ * org.apache.beam.sdk.io.TextIO}).
+ *
+ * <p>Fetching message contents from the HL7v2 store based on the {@link PCollection} of message
+ * ID strings produces a {@link HL7v2IO.Read.Result}, on which one can call {@link
+ * Read.Result#getMessages()} to retrieve a {@link PCollection} containing the successfully fetched
+ * {@link HL7v2Message}s and/or {@link Read.Result#getFailedReads()} to retrieve a {@link
+ * PCollection} of {@link HealthcareIOError} containing the message IDs that could not be fetched
+ * and the corresponding exceptions. The latter can be used to write to the dead letter storage
+ * system of your choosing. This error handling mainly catches scenarios where the upstream {@link
+ * PCollection} contains IDs that are not valid or are not reachable due to permissions issues.
+ *
+ * <p>Message Listing
+ *
+ * <p>Message Listing with {@link HL7v2IO.ListHL7v2Messages} supports batch use cases where you
+ * want to process all the messages in an HL7v2 store or those matching a <a
+ * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list#query-parameters">filter</a>.
+ * This paginates through the results of a <a
+ * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/list">Messages.List</a>
+ * call and outputs directly to a {@link PCollection} of {@link HL7v2Message}. In these use cases,
+ * error handling similar to the above is unnecessary because we are listing from the source of
+ * truth; the pipeline should fail transparently if this transform fails to paginate through all
+ * the results.
+ *
+ * <p>Write
+ *
+ * <p>A bounded or unbounded {@link PCollection} of {@link HL7v2Message} can be ingested into an
+ * HL7v2 store using {@link HL7v2IO#ingestMessages(String)}. This will return a {@link
+ * HL7v2IO.Write.Result} on which you can call {@link Write.Result#getFailedInsertsWithErr()} to
+ * retrieve a {@link PCollection} of {@link HealthcareIOError} containing the {@link HL7v2Message}s
+ * that failed to be ingested and the corresponding exceptions. This can be used to write to the
+ * dead letter storage system of your choosing.
+ *
+ * <p>Unbounded Example:
+ *
+ * <pre>{@code
+ * PipelineOptions options = ...;
+ * Pipeline p = Pipeline.create(options);
+ *
+ * HL7v2IO.Read.Result readResult = p
+ *   .apply(
+ *     "Read HL7v2 notifications",
+ *     PubsubIO.readStrings().fromSubscription(options.getNotificationSubscription()))
+ *   .apply(HL7v2IO.readAll());
+ *
+ * // Write errors to your favorite dead letter queue (e.g. Pub/Sub, GCS, BigQuery).
+ * readResult.getFailedReads().apply("WriteToDeadLetterQueue", ...);
+ *
+ * // Go about your happy path transformations.
+ * PCollection<HL7v2Message> out = readResult.getMessages().apply("ProcessFetchedMessages", ...);
+ *
+ * // Write using the Messages.Ingest method of the HL7v2 REST API.
+ * out.apply(HL7v2IO.ingestMessages(options.getOutputHL7v2Store()));
+ *
+ * p.run();
+ * }
+ * </pre>
+ *
+ * <p>Bounded Read Example:
+ *
+ * <pre>{@code
+ * PipelineOptions options = ...;
+ * Pipeline p = Pipeline.create(options);
+ *
+ * PCollection<HL7v2Message> out = p
+ *   .apply(
+ *       "List messages in HL7v2 store with filter",
+ *       new ListHL7v2Messages(
+ *           Collections.singletonList(options.getInputHL7v2Store()), options.getHL7v2Filter()))
+ *   // Go about your happy path transformations.
+ *   .apply("Process HL7v2 Messages", ...);
+ * p.run().waitUntilFinish();
+ * }
+ * </pre>
+ */
+ */
+public class HL7v2IO {
+
+  private static Write.Builder write(String hl7v2Store) {
+    return new AutoValue_HL7v2IO_Write.Builder().setHL7v2Store(hl7v2Store);
+  }
+
+  public static Read readAll() {
+    return new Read();
+  }
+
+  /**
+   * Write with the Messages.Ingest method. @see <a
+   * href="https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/ingest">Messages.Ingest</a>
+   *
+   * @param hl7v2Store the HL7v2 store to ingest messages into
+   * @return the write
+   */
+  public static Write ingestMessages(String hl7v2Store) {
 
 Review comment:
   This is supposed to mirror the [REST API method](https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.hl7V2Stores.messages/ingest) that's used.
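   For readers skimming this thread, a minimal, hypothetical usage sketch in the same placeholder style as the class-level Javadoc examples (`options.getOutputHL7v2Store()` is taken from that example and assumed to return the full HL7v2 store resource name):

   ```java
   // Not from the PR itself: illustrates how ingestMessages maps onto the Messages.Ingest REST method.
   PCollection<HL7v2Message> messages = ...; // HL7v2 messages produced earlier in the pipeline

   // Each element is ingested via the Messages.Ingest method of the HL7v2 REST API;
   // failed ingests are exposed on the returned Write.Result for dead-letter handling.
   HL7v2IO.Write.Result writeResult =
       messages.apply(HL7v2IO.ingestMessages(options.getOutputHL7v2Store()));
   writeResult.getFailedInsertsWithErr().apply("WriteToDeadLetterQueue", ...);
   ```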

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
