merlimat closed pull request #3071: [Pulsar-Flink] Add Scala Examples
URL: https://github.com/apache/pulsar/pull/3071
 
 
   

This is a PR merged from a forked repository.
Because GitHub hides the original diff once a foreign (forked) pull
request is merged, the diff is reproduced below for the sake of
provenance:

diff --git a/pulsar-flink/pom.xml b/pulsar-flink/pom.xml
index b91af16ad3..e48d213aa6 100644
--- a/pulsar-flink/pom.xml
+++ b/pulsar-flink/pom.xml
@@ -51,6 +51,12 @@
       <optional>true</optional>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.flink</groupId>
+      <artifactId>flink-scala_${scala.binary.version}</artifactId>
+      <version>${flink.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>pulsar-client</artifactId>
diff --git 
a/pulsar-flink/src/test/java/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkExample.java
 
b/pulsar-flink/src/test/java/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkExample.java
index 7b35065ae0..6724c62a9d 100644
--- 
a/pulsar-flink/src/test/java/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkExample.java
+++ 
b/pulsar-flink/src/test/java/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkExample.java
@@ -50,6 +50,7 @@ public static void main(String[] args) throws Exception {
         // create DataSet
         DataSet<String> textDS = env.fromElements(EINSTEIN_QUOTE);
 
+        // convert sentences to words
         textDS.flatMap(new FlatMapFunction<String, WordWithCount>() {
             @Override
             public void flatMap(String value, Collector<WordWithCount> out) 
throws Exception {
@@ -59,23 +60,32 @@ public void flatMap(String value, Collector<WordWithCount> 
out) throws Exception
                 }
             }
         })
+
         // filter words which length is bigger than 4
         .filter(wordWithCount -> wordWithCount.word.length() > 4)
+
+        // group the words
         .groupBy(new KeySelector<WordWithCount, String>() {
             @Override
             public String getKey(WordWithCount wordWithCount) throws Exception 
{
                 return wordWithCount.word;
             }
         })
+
+        // sum the word counts
         .reduce(new ReduceFunction<WordWithCount>() {
             @Override
             public WordWithCount reduce(WordWithCount wordWithCount1, 
WordWithCount wordWithCount2) throws Exception {
                 return  new WordWithCount(wordWithCount1.word, 
wordWithCount1.count + wordWithCount2.count);
             }
         })
+
         // write batch data to Pulsar
         .output(pulsarOutputFormat);
 
+        // set parallelism to write Pulsar in parallel (optional)
+        env.setParallelism(2);
+
         // execute program
         env.execute("Flink - Pulsar Batch WordCount");
 
diff --git 
a/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchCsvSinkScalaExample.scala
 
b/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchCsvSinkScalaExample.scala
new file mode 100644
index 0000000000..7db844b3c2
--- /dev/null
+++ 
b/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchCsvSinkScalaExample.scala
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.flink.batch.connectors.pulsar.example
+
+import org.apache.flink.api.java.tuple.Tuple4
+import org.apache.flink.api.scala._
+import org.apache.flink.batch.connectors.pulsar.PulsarCsvOutputFormat
+
+/**
+  * Implements a batch Scala program on Pulsar topic by writing Flink DataSet 
as Csv.
+  */
+object FlinkPulsarBatchCsvSinkScalaExample {
+
+  /**
+    * NasaMission Model
+    */
+  private case class NasaMission(id: Int, missionName: String, startYear: Int, 
endYear: Int)
+    extends Tuple4(id, missionName, startYear, endYear)
+
+  private val SERVICE_URL = "pulsar://127.0.0.1:6650"
+  private val TOPIC_NAME = "my-flink-topic"
+
+  private val nasaMissions = List(
+    NasaMission(1, "Mercury program", 1959, 1963),
+    NasaMission(2, "Apollo program", 1961, 1972),
+    NasaMission(3, "Gemini program", 1963, 1966),
+    NasaMission(4, "Skylab", 1973, 1974),
+    NasaMission(5, "Apollo–Soyuz Test Project", 1975, 1975))
+
+  def main(args: Array[String]): Unit = {
+
+    // set up the execution environment
+    val env = ExecutionEnvironment.getExecutionEnvironment
+
+    // create PulsarCsvOutputFormat instance
+    val pulsarCsvOutputFormat =
+      new PulsarCsvOutputFormat[NasaMission](SERVICE_URL, TOPIC_NAME)
+
+    // create DataSet
+    val textDS = env.fromCollection(nasaMissions)
+
+    // map nasa mission names to upper-case
+    textDS.map(nasaMission => NasaMission(
+      nasaMission.id,
+      nasaMission.missionName.toUpperCase,
+      nasaMission.startYear,
+      nasaMission.endYear))
+
+    // filter missions which started after 1970
+    .filter(_.startYear > 1970)
+
+    // write batch data to Pulsar as Csv
+    .output(pulsarCsvOutputFormat)
+
+    // set parallelism to write Pulsar in parallel (optional)
+    env.setParallelism(2)
+
+    // execute program
+    env.execute("Flink - Pulsar Batch Csv")
+  }
+
+}
\ No newline at end of file
diff --git 
a/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchJsonSinkScalaExample.scala
 
b/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchJsonSinkScalaExample.scala
new file mode 100644
index 0000000000..1f7fc19b0f
--- /dev/null
+++ 
b/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchJsonSinkScalaExample.scala
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.flink.batch.connectors.pulsar.example
+
+import org.apache.flink.api.scala._
+import org.apache.flink.batch.connectors.pulsar.PulsarJsonOutputFormat
+
+import scala.beans.BeanProperty
+
+/**
+  * Implements a batch Scala program on Pulsar topic by writing Flink DataSet 
as Json.
+  */
+object FlinkPulsarBatchJsonSinkScalaExample {
+
+  /**
+    * NasaMission Model
+    */
+  private case class NasaMission(@BeanProperty id: Int,
+                         @BeanProperty missionName: String,
+                         @BeanProperty startYear: Int,
+                         @BeanProperty endYear: Int)
+
+  private val nasaMissions = List(
+    NasaMission(1, "Mercury program", 1959, 1963),
+    NasaMission(2, "Apollo program", 1961, 1972),
+    NasaMission(3, "Gemini program", 1963, 1966),
+    NasaMission(4, "Skylab", 1973, 1974),
+    NasaMission(5, "Apollo–Soyuz Test Project", 1975, 1975))
+
+  private val SERVICE_URL = "pulsar://127.0.0.1:6650"
+  private val TOPIC_NAME = "my-flink-topic"
+
+  def main(args: Array[String]): Unit = {
+
+    // set up the execution environment
+    val env = ExecutionEnvironment.getExecutionEnvironment
+
+    // create PulsarJsonOutputFormat instance
+    val pulsarJsonOutputFormat = new 
PulsarJsonOutputFormat[NasaMission](SERVICE_URL, TOPIC_NAME)
+
+    // create DataSet
+    val nasaMissionDS = env.fromCollection(nasaMissions)
+
+    // map nasa mission names to upper-case
+    nasaMissionDS.map(nasaMission =>
+      NasaMission(
+        nasaMission.id,
+        nasaMission.missionName.toUpperCase,
+        nasaMission.startYear,
+        nasaMission.endYear))
+
+    // filter missions which started after 1970
+    .filter(_.startYear > 1970)
+
+    // write batch data to Pulsar
+    .output(pulsarJsonOutputFormat)
+
+    // set parallelism to write Pulsar in parallel (optional)
+    env.setParallelism(2)
+
+    // execute program
+    env.execute("Flink - Pulsar Batch Json")
+  }
+
+}
diff --git 
a/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkScalaExample.scala
 
b/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkScalaExample.scala
new file mode 100644
index 0000000000..5e536cfa3c
--- /dev/null
+++ 
b/pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkScalaExample.scala
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.flink.batch.connectors.pulsar.example
+
+import org.apache.flink.api.common.serialization.SerializationSchema
+import org.apache.flink.api.scala._
+import org.apache.flink.batch.connectors.pulsar.PulsarOutputFormat
+import org.apache.flink.util.Collector
+
+/**
+  * Data type for words with count.
+  */
+case class WordWithCount(word: String, count: Long) {
+  override def toString: String = "WordWithCount { word = " + word + ", count 
= " + count + " }"
+}
+
+/**
+  * Implements a batch word-count Scala program on Pulsar topic by writing 
Flink DataSet.
+  */
+object FlinkPulsarBatchSinkScalaExample {
+
+  private val EINSTEIN_QUOTE = "Imagination is more important than knowledge. 
" +
+    "Knowledge is limited. Imagination encircles the world."
+  private val SERVICE_URL = "pulsar://127.0.0.1:6650"
+  private val TOPIC_NAME = "my-flink-topic"
+
+  def main(args: Array[String]): Unit = {
+
+    // set up the execution environment
+    val env = ExecutionEnvironment.getExecutionEnvironment
+
+    // create PulsarOutputFormat instance
+    val pulsarOutputFormat =
+      new PulsarOutputFormat[WordWithCount](SERVICE_URL, TOPIC_NAME, new 
SerializationSchema[WordWithCount] {
+        override def serialize(wordWithCount: WordWithCount): Array[Byte] = 
wordWithCount.toString.getBytes
+      })
+
+    // create DataSet
+    val textDS = env.fromElements[String](EINSTEIN_QUOTE)
+
+    // convert sentence to words
+    textDS.flatMap((value: String, out: Collector[WordWithCount]) => {
+      val words = value.toLowerCase.split(" ")
+      for (word <- words) {
+        out.collect(new WordWithCount(word.replace(".", ""), 1))
+      }
+    })
+
+    // filter words which length is bigger than 4
+    .filter((wordWithCount: WordWithCount) => wordWithCount.word.length > 4)
+
+    // group the words
+    .groupBy((wordWithCount: WordWithCount) => wordWithCount.word)
+
+    // sum the word counts
+    .reduce((wordWithCount1: WordWithCount, wordWithCount2: WordWithCount) =>
+      new WordWithCount(wordWithCount1.word, wordWithCount1.count + 
wordWithCount2.count))
+
+    // write batch data to Pulsar
+    .output(pulsarOutputFormat)
+
+    // set parallelism to write Pulsar in parallel (optional)
+    env.setParallelism(2)
+
+    // execute program
+    env.execute("Flink - Pulsar Batch WordCount")
+  }
+
+}
\ No newline at end of file


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to