This is an automated email from the ASF dual-hosted git repository.
fanningpj pushed a commit to branch main
in repository
https://gitbox.apache.org/repos/asf/incubator-pekko-connectors-samples.git
The following commit(s) were added to refs/heads/main by this push:
new 4c59125 [mqtt-http-to-s3-java] Migrate akka dependencies to pekko
(#20)
4c59125 is described below
commit 4c59125b85da55fd6561fc7247c8d62458270111
Author: Laglangyue <[email protected]>
AuthorDate: Sun Dec 17 18:21:07 2023 +0800
[mqtt-http-to-s3-java] Migrate akka dependencies to pekko (#20)
* [mqtt-http-to-s3-java] Migrate akka dependencies to pekko
* [mqtt-http-to-s3-java] Migrate akka dependencies to pekko
* [mqtt-http-to-s3-java] Migrate akka dependencies to pekko
* [mqtt-http-to-s3-java] Migrate akka dependencies to pekko
* [mqtt-http-to-s3-java] Migrate akka dependencies to pekko
* [mqtt-http-to-s3-java] Migrate akka dependencies to pekko
* doc build
---------
Co-authored-by: laglang <[email protected]>
Co-authored-by: PJ Fanning <[email protected]>
---
docs/build.sbt | 101 ++++++++++-----------
.../.courseName | 2 +-
.../README.md | 2 +-
.../step_003_parse_csv/README.md | 2 +-
.../step_006_coordinated_shutdown/README.md | 2 +-
.../README.md | 2 +-
.../docs/src/main/paradox/example.md | 4 +-
.../README.md | 2 +-
.../build.sbt | 23 ++---
.../sample/triggereddownload/DownloadCommand.java | 22 -----
.../alpakka/sample/triggereddownload/Main.java | 82 -----------------
.../triggereddownload/PublishDataToMqtt.java | 56 ------------
.../sample/triggereddownload/DownloadCommand.java | 23 +++++
.../main/java/sample/triggereddownload/Main.java | 87 ++++++++++++++++++
.../triggereddownload/PublishDataToMqtt.java | 57 ++++++++++++
.../src/main/resources/application.conf | 8 +-
.../src/main/resources/credentials.conf-RENAME | 2 +-
.../src/main/resources/logback.xml | 2 +-
pekko-connectors-sample-mqtt-to-kafka/README.md | 2 +-
.../docs/src/main/paradox/example.md | 2 +-
.../README.md | 2 +-
21 files changed, 246 insertions(+), 239 deletions(-)
diff --git a/docs/build.sbt b/docs/build.sbt
index d002fbb..6f1b6a1 100644
--- a/docs/build.sbt
+++ b/docs/build.sbt
@@ -48,21 +48,24 @@ HttpCsvToKafka / paradoxProperties ++= Map(
"snip.build.base_dir" ->
s"${baseDirectory.value}/../pekko-connectors-sample-${HttpCsvToKafka.name}",
"github.root.base_dir" -> s"${baseDirectory.value}/..",
// Pekko Connectors
- "scaladoc.akka.stream.alpakka.base_url" ->
s"https://doc.akka.io/api/alpakka/${Dependencies.HttpCsvToKafka.PekkoConnectorsVersion}",
- "javadoc.akka.base_url" -> "",
- "extref.alpakka.base_url" ->
s"https://doc.akka.io/docs/alpakka/${Dependencies.HttpCsvToKafka.PekkoConnectorsVersion}/%s",
+ "scaladoc.org.apache.pekko.stream.connectors.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.HttpCsvToKafka.PekkoConnectorsVersion}",
+ "javadoc.org.apache.pekko.stream.connectors.base_url" -> "",
+ "extref.pekko-connectors.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors/${Dependencies.HttpCsvToKafka.PekkoConnectorsVersion}/%s",
// Pekko Connectors Kafka
- "scaladoc.akka.kafka.base_url" ->
s"https://doc.akka.io/api/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}",
- "javadoc.akka.kafka.base_url" -> "",
- "extref.pekko-connectors-kafka.base_url" ->
s"https://doc.akka.io/docs/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}/%s",
+ "scaladoc.org.apache.pekko.kafka.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}",
+ "javadoc.org.apache.pekko.kafka.base_url" -> "",
+ "extref.pekko-connectors-kafka.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}/%s",
// Pekko
- "scaladoc.akka.base_url" ->
s"https://doc.akka.io/api/akka/${Dependencies.HttpCsvToKafka.PekkoVersion}",
- "javadoc.akka.base_url" ->
s"https://doc.akka.io/japi/akka/${Dependencies.HttpCsvToKafka.PekkoVersion}",
- "extref.akka.base_url" ->
s"https://doc.akka.io/docs/akka/${Dependencies.HttpCsvToKafka.PekkoVersion}/%s",
+ "scaladoc.akka.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}",
+ "scaladoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}",
+ "javadoc.akka.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}",
+ "javadoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}",
+ "extref.akka.base_url" ->
s"https://pekko.apache.org/docs/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}/%s",
+ "extref.pekko.base_url" ->
s"https://pekko.apache.org/docs/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}/%s",
// Pekko HTTP
- "scaladoc.akka.http.base_url" ->
s"https://doc.akka.io/api/akka-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}",
- "javadoc.akka.http.base_url" ->
s"https://doc.akka.io/japi/akka-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}",
- "extref.akka-http.base_url" ->
s"https://doc.akka.io/docs/akka-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}/%s",
+ "scaladoc.org.apache.pekko.http.base_url" ->
s"https://pekko.apache.org/api/pekko-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}",
+ "javadoc.org.apache.pekko.http.base_url" ->
s"https://pekko.apache.org/japi/pekko-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}",
+ "extref.pekko-http.base_url" ->
s"https://pekko.apache.org/docs/pekko-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}/%s",
)
HttpCsvToKafka / paradoxGroups := Map("Language" -> Seq("Java", "Scala"))
@@ -77,12 +80,12 @@ JdbcToElasticsearch / paradoxProperties ++= Map(
"canonical.base_url" -> s"${homepage.value.get}/${JdbcToElasticsearch.name}",
"snip.build.base_dir" ->
s"${baseDirectory.value}/../pekko-connectors-sample-${JdbcToElasticsearch.name}",
"github.root.base_dir" -> s"${baseDirectory.value}/..",
- "scaladoc.pekko.stream.pekko-connectors.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.JdbcToElasticsearch.PekkoConnectorsVersion}",
- "javadoc.pekko.base_url" -> "",
+ "scaladoc.org.apache.pekko.stream.connectors.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.JdbcToElasticsearch.PekkoConnectorsVersion}",
+ "javadoc.org.apache.pekko.base_url" -> "",
"extref.pekko-connectors.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors/${Dependencies.JdbcToElasticsearch.PekkoConnectorsVersion}/%s",
// Pekko
- "scaladoc.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.JdbcToElasticsearch.PekkoVersion}",
- "javadoc.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.JdbcToElasticsearch.PekkoVersion}",
+ "scaladoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.JdbcToElasticsearch.PekkoVersion}",
+ "javadoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.JdbcToElasticsearch.PekkoVersion}",
"extref.pekko.base_url" ->
s"https://pekko.apache.org/docs/pekko/${Dependencies.JdbcToElasticsearch.PekkoVersion}/%s",
)
JdbcToElasticsearch / paradoxGroups := Map("Language" -> Seq("Java", "Scala"))
@@ -99,12 +102,12 @@ Jms / paradoxProperties ++= Map(
"snip.build.base_dir" ->
s"${baseDirectory.value}/../pekko-connectors-sample-${Jms.name}",
"github.root.base_dir" -> s"${baseDirectory.value}/..",
// Pekko Connectors
- "scaladoc.pekko.stream.pekko-connectors.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.Jms.PekkoConnectorsVersion}",
- "javadoc.pekko.base_url" -> "",
+ "scaladoc.org.apache.pekko.stream.connectors.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.Jms.PekkoConnectorsVersion}",
+ "javadoc.org.apache.pekko.stream.connectors.base_url" -> "",
"extref.pekko-connectors.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors/${Dependencies.Jms.PekkoConnectorsVersion}/%s",
// Pekko
- "scaladoc.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.Jms.PekkoVersion}",
- "javadoc.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.Jms.PekkoVersion}",
+ "scaladoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.Jms.PekkoVersion}",
+ "javadoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.Jms.PekkoVersion}",
"extref.pekko.base_url" ->
s"https://pekko.apache.org/docs/pekko/${Dependencies.Jms.PekkoVersion}/%s",
// Pekko HTTP
"scaladoc.org.apache.pekko.http.base_url" ->
s"https://pekko.apache.org/api/pekko-http/${Dependencies.Jms.PekkoHttpVersion}",
@@ -151,22 +154,18 @@ KafkaToWebsocketClients / paradoxProperties ++= Map(
"canonical.base_url" ->
s"${homepage.value.get}/${KafkaToWebsocketClients.name}",
"snip.build.base_dir" ->
s"${baseDirectory.value}/../pekko-connectors-sample-${KafkaToWebsocketClients.name}",
"github.root.base_dir" -> s"${baseDirectory.value}/..",
- // Pekko Connectors
-// "scaladoc.akka.stream.alpakka.base_url" ->
s"https://doc.akka.io/api/alpakka/${Dependencies.KafkaToWebsocketClients.PekkoConnectorsVersion}",
-// "javadoc.akka.base_url" -> "",
-// "extref.alpakka.base_url" ->
s"https://doc.akka.io/docs/alpakka/${Dependencies.KafkaToWebsocketClients.PekkoConnectorsVersion}/%s",
// Pekko Connectors Kafka
- "scaladoc.akka.kafka.base_url" ->
s"https://doc.akka.io/api/pekko-connectors-kafka/${Dependencies.KafkaToWebsocketClients.PekkoConnectorsKafkaVersion}",
- "javadoc.akka.kafka.base_url" -> "",
- "extref.pekko-connectors-kafka.base_url" ->
s"https://doc.akka.io/docs/pekko-connectors-kafka/${Dependencies.KafkaToWebsocketClients.PekkoConnectorsKafkaVersion}/%s",
+ "javadoc.org.apache.pekko.kafka.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors-kafka/${Dependencies.KafkaToWebsocketClients.PekkoConnectorsKafkaVersion}",
+ "javadoc.org.apache.pekko.kafka.base_url" -> "",
+ "extref.pekko-connectors-kafka.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors-kafka/${Dependencies.KafkaToWebsocketClients.PekkoConnectorsKafkaVersion}/%s",
// Pekko
- "scaladoc.akka.base_url" ->
s"https://doc.akka.io/api/akka/${Dependencies.KafkaToWebsocketClients.PekkoVersion}",
- "javadoc.akka.base_url" ->
s"https://doc.akka.io/japi/akka/${Dependencies.KafkaToWebsocketClients.PekkoVersion}",
- "extref.akka.base_url" ->
s"https://doc.akka.io/docs/akka/${Dependencies.KafkaToWebsocketClients.PekkoVersion}/%s",
+ "scaladoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.KafkaToWebsocketClients.PekkoVersion}",
+ "javadoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.KafkaToWebsocketClients.PekkoVersion}",
+ "extref.pekko.base_url" ->
s"https://pekko.apache.org/docs/pekko/${Dependencies.KafkaToWebsocketClients.PekkoVersion}/%s",
// Pekko HTTP
- "scaladoc.akka.http.base_url" ->
s"https://doc.akka.io/api/akka-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}",
- "javadoc.akka.http.base_url" ->
s"https://doc.akka.io/japi/akka-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}",
- "extref.akka-http.base_url" ->
s"https://doc.akka.io/docs/akka-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}/%s",
+ "scaladoc.org.apache.pekko.http.base_url" ->
s"https://pekko.apache.org/api/pekko-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}",
+ "javadoc.org.apache.pekko.http.base_url" ->
s"https://pekko.apache.org/japi/pekko-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}",
+ "extref.pekko-http.base_url" ->
s"https://pekko.apache.org/docs/pekko-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}/%s",
)
KafkaToWebsocketClients / paradoxGroups := Map("Language" -> Seq("Java",
"Scala"))
@@ -182,17 +181,17 @@ MqttToKafka / paradoxProperties ++= Map(
"snip.build.base_dir" ->
s"${baseDirectory.value}/../pekko-connectors-sample-${MqttToKafka.name}",
"github.root.base_dir" -> s"${baseDirectory.value}/..",
// Pekko Connectors
- "scaladoc.akka.stream.alpakka.base_url" ->
s"https://doc.akka.io/api/alpakka/${Dependencies.MqttToKafka.PekkoConnectorsVersion}",
- "javadoc.akka.base_url" -> "",
- "extref.alpakka.base_url" ->
s"https://doc.akka.io/docs/alpakka/${Dependencies.MqttToKafka.PekkoConnectorsVersion}/%s",
+ "javadoc.org.apache.pekko.stream.connectors.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.MqttToKafka.PekkoConnectorsVersion}",
+ "javadoc.org.apache.pekko.base_url" -> "",
+ "extref.pekko-connectors.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors/${Dependencies.MqttToKafka.PekkoConnectorsVersion}/%s",
// Pekko Connectors Kafka
- "scaladoc.akka.kafka.base_url" ->
s"https://doc.akka.io/api/pekko-connectors-kafka/${Dependencies.MqttToKafka.PekkoConnectorsKafkaVersion}",
- "javadoc.akka.kafka.base_url" -> "",
- "extref.pekko-connectors-kafka.base_url" ->
s"https://doc.akka.io/docs/pekko-connectors-kafka/${Dependencies.MqttToKafka.PekkoConnectorsKafkaVersion}/%s",
+ "javadoc.org.apache.pekko.kafka.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors-kafka/${Dependencies.MqttToKafka.PekkoConnectorsKafkaVersion}",
+ "javadoc.org.apache.pekko.kafka.base_url" -> "",
+ "extref.pekko-connectors-kafka.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors-kafka/${Dependencies.MqttToKafka.PekkoConnectorsKafkaVersion}/%s",
// Pekko
- "scaladoc.akka.base_url" ->
s"https://doc.akka.io/api/akka/${Dependencies.MqttToKafka.PekkoVersion}",
- "javadoc.akka.base_url" ->
s"https://doc.akka.io/japi/akka/${Dependencies.MqttToKafka.PekkoVersion}",
- "extref.akka.base_url" ->
s"https://doc.akka.io/docs/akka/${Dependencies.MqttToKafka.PekkoVersion}/%s",
+ "scaladoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.MqttToKafka.PekkoVersion}",
+ "javadoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.MqttToKafka.PekkoVersion}",
+ "extref.pekko.base_url" ->
s"https://pekko.apache.org/docs/pekko/${Dependencies.MqttToKafka.PekkoVersion}/%s",
)
MqttToKafka / paradoxGroups := Map("Language" -> Seq("Java", "Scala"))
@@ -231,22 +230,22 @@ RotateLogsToFtp / paradoxProperties ++= Map(
"snip.build.base_dir" ->
s"${baseDirectory.value}/../pekko-connectors-sample-${RotateLogsToFtp.name}",
"github.root.base_dir" -> s"${baseDirectory.value}/..",
// Pekko Connectors
- "scaladoc.akka.stream.alpakka.base_url" ->
s"https://doc.akka.io/api/alpakka/${Dependencies.RotateLogsToFtp.PekkoConnectorsVersion}",
- "javadoc.akka.base_url" -> "",
- "extref.alpakka.base_url" ->
s"https://doc.akka.io/docs/alpakka/${Dependencies.RotateLogsToFtp.PekkoConnectorsVersion}/%s",
+ "scaladoc.org.apache.pekko.stream.connectors.base_url" ->
s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.RotateLogsToFtp.PekkoConnectorsVersion}",
+ "javadoc.org.apache.pekko.stream.connectors.base_url" -> "",
+ "extref.pekko-connectors.base_url" ->
s"https://pekko.apache.org/docs/pekko-connectors/${Dependencies.RotateLogsToFtp.PekkoConnectorsVersion}/%s",
// Pekko
- "scaladoc.akka.base_url" ->
s"https://doc.akka.io/api/akka/${Dependencies.RotateLogsToFtp.PekkoVersion}",
- "javadoc.akka.base_url" ->
s"https://doc.akka.io/japi/akka/${Dependencies.RotateLogsToFtp.PekkoVersion}",
- "extref.akka.base_url" ->
s"https://doc.akka.io/docs/akka/${Dependencies.RotateLogsToFtp.PekkoVersion}/%s",
+ "scaladoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/api/pekko/${Dependencies.RotateLogsToFtp.PekkoVersion}",
+ "javadoc.org.apache.pekko.base_url" ->
s"https://pekko.apache.org/japi/pekko/${Dependencies.RotateLogsToFtp.PekkoVersion}",
+ "extref.pekko.base_url" ->
s"https://pekko.apache.org/docs/pekko/${Dependencies.RotateLogsToFtp.PekkoVersion}/%s",
)
RotateLogsToFtp / paradoxGroups := Map("Language" -> Seq("Java", "Scala"))
Paradox / siteSubdirName := ""
paradoxProperties ++= Map(
- "extref.akka.base_url" -> "https://doc.akka.io/docs/akka/current/",
- "extref.alpakka.base_url" -> "https://doc.akka.io/docs/alpakka/current/",
- "extref.pekko-connectors-kafka.base_url" ->
"https://doc.akka.io/docs/pekko-connectors-kafka/current/",
+ "extref.pekko.base_url" -> "https://pekko.apache.org/docs/pekko/current/",
+ "extref.pekko-connectors.base_url" ->
"https://pekko.apache.org/docs/pekko-connectors/current/",
+ "extref.pekko-connectors-kafka.base_url" ->
"https://pekko.apache.org/docs/pekko-connectors-kafka/current/",
"extref.ftp-to-file.base_url" -> s"${(FtpToFile / siteSubdirName).value}/",
"extref.http-csv-to-kafka.base_url" -> s"${(HttpCsvToKafka /
siteSubdirName).value}/",
"extref.jdbc-to-elasticsearch.base_url" -> s"${(JdbcToElasticsearch /
siteSubdirName).value}/",
diff --git a/pekko-connectors-sample-http-csv-to-kafka/.courseName
b/pekko-connectors-sample-http-csv-to-kafka/.courseName
index dcfce32..a21f99f 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/.courseName
+++ b/pekko-connectors-sample-http-csv-to-kafka/.courseName
@@ -1 +1 @@
-Alpakka: HTTP CSV to Kafka
+Pekko-Connectors: HTTP CSV to Kafka
diff --git a/pekko-connectors-sample-http-csv-to-kafka/README.md
b/pekko-connectors-sample-http-csv-to-kafka/README.md
index 2327648..88c5b6e 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/README.md
+++ b/pekko-connectors-sample-http-csv-to-kafka/README.md
@@ -2,7 +2,7 @@
## Fetch CSV via Akka HTTP and publish the data as JSON to Kafka
-This example uses @extref[Akka HTTP to send the HTTP
request](akka-http:client-side/connection-level.html#opening-http-connections)
and Akka HTTPs primary JSON support via @extref[Spray
JSON](akka-http:common/json-support.html#spray-json-support) (for Scala) or
Jackson JSON (for Java) to convert the map into a JSON structure which gets
published to a Kafka topic.
+This example uses @extref[Akka HTTP to send the HTTP
request](pekko-http:client-side/connection-level.html#opening-http-connections)
and Akka HTTP's primary JSON support via @extref[Spray
JSON](pekko-http:common/json-support.html#spray-json-support) (for Scala) or
Jackson JSON (for Java) to convert the map into a JSON structure which gets
published to a Kafka topic.
Browse the sources at
@link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-http-csv-to-kafka)
{ open=new }.
diff --git
a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md
b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md
index 8f85b07..cfc76cc 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md
+++ b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md
@@ -2,4 +2,4 @@
### Description
-The binary data in @scaladoc:[ByteString](akka.util.ByteString)s is passed
into @extref:[Pekko-Connectors CSV](alpakka:data-transformations/csv.html) to
be parsed and converted per line into a Map. The stream elements becomes a
@scala[`Map[String, ByteString]`]@java[`Map<String, ByteString>`], one entry
per column using the column headers as keys.
+The binary data in @scaladoc:[ByteString](akka.util.ByteString)s is passed
into @extref:[Pekko-Connectors
CSV](pekko-connectors:data-transformations/csv.html) to be parsed and converted
per line into a Map. The stream elements become a @scala[`Map[String,
ByteString]`]@java[`Map<String, ByteString>`], one entry per column using the
column headers as keys.
diff --git
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
index f80b86f..91b9fec 100644
---
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
+++
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
@@ -2,4 +2,4 @@
### Description
-To make sure Akka HTTP is shut down in a proper way it is added to Akka's
@extref:[Coordinated shutdown](akka:actors.html#coordinated-shutdown).
+To make sure Akka HTTP is shut down in a proper way it is added to Akka's
@extref:[Coordinated shutdown](pekko:actors.html#coordinated-shutdown).
diff --git a/pekko-connectors-sample-kafka-to-websocket-clients/README.md
b/pekko-connectors-sample-kafka-to-websocket-clients/README.md
index 870a6e1..6ccf074 100644
--- a/pekko-connectors-sample-kafka-to-websocket-clients/README.md
+++ b/pekko-connectors-sample-kafka-to-websocket-clients/README.md
@@ -2,7 +2,7 @@
## Read from a Kafka topic and push the data to connected websocket clients
-Clients may connect via websockets and will receive data read from a Kafka
topic. The websockets are implemented in @extref[Akka HTTP](akka-http:) and
[Pekko-Connectors Kafka](pekko-connectors-kafka:) subscribes to the Kafka topic.
+Clients may connect via websockets and will receive data read from a Kafka
topic. The websockets are implemented in @extref[Akka HTTP](pekko-http:) and
[Pekko-Connectors Kafka](pekko-connectors-kafka:) subscribes to the Kafka topic.
Browse the sources at
@link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-kafka-to-websocket-clients)
{ open=new }.
diff --git
a/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
b/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
index ec4c4ba..ab84142 100644
---
a/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
+++
b/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
@@ -4,14 +4,14 @@ This page highlights the most important sections of the
example.
### Subscribe to the Kafka topic
-Use an @extref:[Pekko-Connectors Kafka](pekko-connectors-kafka:) consumer to
subscribe to a topic in Kafka. The received `String` values are sent to a
@extref[BroadcastHub](akka:stream/stream-dynamic.html#using-the-broadcasthub)
which creates a `Source` for the clients to connect to.
+Use a @extref:[Pekko-Connectors Kafka](pekko-connectors-kafka:) consumer to
subscribe to a topic in Kafka. The received `String` values are sent to a
@extref[BroadcastHub](pekko:stream/stream-dynamic.html#using-the-broadcasthub)
which creates a `Source` for the clients to connect to.
Java
: @@snip [snip](/src/main/java/samples/javadsl/Main.java) {
#kafka-to-broadcast }
### Handler for websocket requests
-This `websocketHandler` is a `Flow` which will be used when a websocket client
connects. It ignores data sent to it and publishes all data received from the
`topicSource()` which is backed by the
@extref[BroadcastHub](akka:stream/stream-dynamic.html#using-the-broadcasthub).
+This `websocketHandler` is a `Flow` which will be used when a websocket client
connects. It ignores data sent to it and publishes all data received from the
`topicSource()` which is backed by the
@extref[BroadcastHub](pekko:stream/stream-dynamic.html#using-the-broadcasthub).
Java
: @@snip [snip](/src/main/java/samples/javadsl/Main.java) { #websocket-handler
}
diff --git a/pekko-connectors-sample-mqtt-http-to-s3-java/README.md
b/pekko-connectors-sample-mqtt-http-to-s3-java/README.md
index de253c3..5334ef3 100644
--- a/pekko-connectors-sample-mqtt-http-to-s3-java/README.md
+++ b/pekko-connectors-sample-mqtt-http-to-s3-java/README.md
@@ -5,6 +5,6 @@
Running this requires:
- MQTT broker running (provided via docker-compose)
- AWS S3 account configured in ./src/main/resources/credentials.conf as
exemplified in credentials.conf-RENAME
-- AWS bucket "alpakka.samples" created
+- AWS bucket "pekko.connectors.samples" created
Messages to MQTT can be published by `PublishDataToMqtt`.
diff --git a/pekko-connectors-sample-mqtt-http-to-s3-java/build.sbt
b/pekko-connectors-sample-mqtt-http-to-s3-java/build.sbt
index bdbdc20..0357796 100644
--- a/pekko-connectors-sample-mqtt-http-to-s3-java/build.sbt
+++ b/pekko-connectors-sample-mqtt-http-to-s3-java/build.sbt
@@ -3,19 +3,20 @@ name := "pekko-connectors-samples-mqtt-http-to-s3-java"
ThisBuild / scalaVersion := "2.13.12"
-val PekkoVersion = "2.6.19"
-val AkkaHttpVersion = "10.1.12"
+val PekkoVersion = "1.0.2"
+val PekkoHttpVersion = "1.0.0"
+val PekkoConnectorsVersion = "1.0.1"
libraryDependencies ++= Seq(
- "com.typesafe.akka" %% "akka-slf4j" % PekkoVersion,
- "com.typesafe.akka" %% "akka-stream" % PekkoVersion,
- "com.typesafe.akka" %% "akka-actor-typed" % PekkoVersion,
- "com.typesafe.akka" %% "akka-actor" % PekkoVersion,
- "com.typesafe.akka" %% "akka-http" % AkkaHttpVersion,
- "com.lightbend.akka" %% "akka-stream-alpakka-mqtt" % "3.0.4",
- "com.lightbend.akka" %% "akka-stream-alpakka-s3" % "3.0.4",
- "com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % "2.11.4",
- "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.11.4",
+ "org.apache.pekko" %% "pekko-slf4j" % PekkoVersion,
+ "org.apache.pekko" %% "pekko-stream" % PekkoVersion,
+ "org.apache.pekko" %% "pekko-actor-typed" % PekkoVersion,
+ "org.apache.pekko" %% "pekko-actor" % PekkoVersion,
+ "org.apache.pekko" %% "pekko-http" % PekkoHttpVersion,
+ "org.apache.pekko" %% "pekko-connectors-s3" % PekkoConnectorsVersion,
+ "org.apache.pekko" %% "pekko-connectors-mqtt" % PekkoConnectorsVersion,
+ "com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % "2.14.3",
+ "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.14.3",
"ch.qos.logback" % "logback-classic" % "1.2.13"
)
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/DownloadCommand.java
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/DownloadCommand.java
deleted file mode 100644
index 7a07323..0000000
---
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/DownloadCommand.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package alpakka.sample.triggereddownload;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.time.Instant;
-
-final class DownloadCommand {
- public final Instant timestamp;
- public final String url;
-
- @JsonCreator
- public DownloadCommand(@JsonProperty("timestamp") Instant timestamp,
@JsonProperty("url") String url) {
- this.timestamp = timestamp;
- this.url = url;
- }
-
- @Override
- public String toString() {
- return "DownloadCommand(" + timestamp.toString() + ", url=" + url +
")";
- }
-}
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/Main.java
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/Main.java
deleted file mode 100644
index 281d3d2..0000000
---
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/Main.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package alpakka.sample.triggereddownload;
-
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import static akka.actor.typed.javadsl.Adapter.*;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.ContentTypes;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.Uri;
-import akka.stream.alpakka.mqtt.MqttConnectionSettings;
-import akka.stream.alpakka.mqtt.MqttQoS;
-import akka.stream.alpakka.mqtt.MqttSubscriptions;
-import akka.stream.alpakka.mqtt.javadsl.MqttSource;
-import akka.stream.alpakka.s3.javadsl.S3;
-import akka.stream.javadsl.Source;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
-import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
-import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
-
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
-
-public class Main {
-
- final ActorSystem<Void> system;
- final Http http;
-
- public static void main(String[] args) throws Exception {
- Main me = new Main();
- me.run();
- }
-
- public Main() {
- system = ActorSystem.create(Behaviors.empty(), "MqttHttpToS3");
- http = Http.get(toClassic(system));
- }
-
- final ObjectMapper mapper = new ObjectMapper().registerModule(new
JavaTimeModule());
- final ObjectReader downloadCommandReader =
mapper.readerFor(DownloadCommand.class);
-
- final String mqttBroker = "tcp://localhost:1883";
- // Remember to set up topic in MQTT server's acl config
- final String topic = "downloads/trigger";
- final String s3Bucket = "alpakka.samples";
-
- private String createS3BucketKey(DownloadCommand info) {
- return
LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) +
Uri.create(info.url).getPathString().replace("/", "-");
- }
-
- void run() throws Exception {
- final MqttConnectionSettings mqttConnectionSettings =
- MqttConnectionSettings
- .create(
- mqttBroker,
- "upload-control",
- new MemoryPersistence()
- );
-
- @SuppressWarnings("unchecked")
- MqttSubscriptions mqttSubscriptions =
- MqttSubscriptions.create(topic, MqttQoS.atLeastOnce());
-
- MqttSource
- .atMostOnce(mqttConnectionSettings, mqttSubscriptions, 8)
- .map(m -> m.payload().utf8String())
- .<DownloadCommand>map(downloadCommandReader::readValue)
- .mapAsync(4, info -> {
- String s3BucketKey = createS3BucketKey(info);
- return Source.single(info.url)
- .map(HttpRequest::GET)
- .mapAsync(1, http::singleRequest)
- .flatMapConcat(httpResponse ->
httpResponse.entity().getDataBytes())
- .runWith(S3.multipartUpload(s3Bucket,
s3BucketKey, ContentTypes.TEXT_HTML_UTF8), system);
- }
- )
- .runForeach(res -> System.out.println(res), system)
- .exceptionally(e -> { e.printStackTrace(); return Done.done();
});
- }
-
-}
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/PublishDataToMqtt.java
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/PublishDataToMqtt.java
deleted file mode 100644
index 4e7f66e..0000000
---
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/alpakka/sample/triggereddownload/PublishDataToMqtt.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package alpakka.sample.triggereddownload;
-
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import static akka.actor.typed.javadsl.Adapter.*;
-import akka.http.javadsl.Http;
-import akka.stream.alpakka.mqtt.MqttConnectionSettings;
-import akka.stream.alpakka.mqtt.MqttMessage;
-import akka.stream.alpakka.mqtt.MqttQoS;
-import akka.stream.alpakka.mqtt.javadsl.MqttSink;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
-import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
-import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
-
-import java.time.Duration;
-import java.time.Instant;
-import java.util.concurrent.CompletionStage;
-
-public class PublishDataToMqtt {
- final ActorSystem<Void> system;
- final Http http;
-
- public static void main(String[] args) throws Exception {
- PublishDataToMqtt me = new PublishDataToMqtt();
- me.run();
- }
-
- public PublishDataToMqtt() {
- system = ActorSystem.create(Behaviors.empty(), "PublishDataToMqTT");
- http = Http.get(toClassic(system));
- }
-
- final ObjectMapper mapper = new ObjectMapper().registerModule(new
JavaTimeModule());
- final ObjectWriter downloadCommandWriter =
mapper.writerFor(DownloadCommand.class);
-
-
- void run() throws Exception {
-
-
- final MqttConnectionSettings connectionSettings =
- MqttConnectionSettings.create(
- "tcp://localhost:1883", "test-java-client", new
MemoryPersistence());
- Sink<MqttMessage, CompletionStage<Done>> mqttSink =
-
MqttSink.create(connectionSettings.withClientId("source-test/sink"),
MqttQoS.atLeastOnce());
-
- DownloadCommand command = new DownloadCommand(Instant.now(),
"https://doc.akka.io/docs/alpakka/current/s3.html");
- MqttMessage message = MqttMessage.create("downloads/trigger",
ByteString.fromString(downloadCommandWriter.writeValueAsString(command)));
-
- Source.tick(Duration.ofSeconds(5), Duration.ofSeconds(30),
message).runWith(mqttSink, system);
- }
-}
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/DownloadCommand.java
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/DownloadCommand.java
new file mode 100644
index 0000000..e014aef
--- /dev/null
+++
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/DownloadCommand.java
@@ -0,0 +1,23 @@
+package sample.triggereddownload;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.time.Instant;
+
+public final class DownloadCommand {
+
+ public final Instant timestamp;
+ public final String url;
+
+ @JsonCreator
+ public DownloadCommand(@JsonProperty("timestamp") Instant timestamp,
@JsonProperty("url") String url) {
+ this.timestamp = timestamp;
+ this.url = url;
+ }
+
+ @Override
+ public String toString() {
+ return "DownloadCommand(" + timestamp.toString() + ", url=" + url + ")";
+ }
+}
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/Main.java
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/Main.java
new file mode 100644
index 0000000..e7be87d
--- /dev/null
+++
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/Main.java
@@ -0,0 +1,87 @@
+package sample.triggereddownload;
+
+
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.ContentTypes;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.Uri;
+import org.apache.pekko.stream.connectors.mqtt.MqttConnectionSettings;
+import org.apache.pekko.stream.connectors.mqtt.MqttQoS;
+import org.apache.pekko.stream.connectors.mqtt.MqttSubscriptions;
+import org.apache.pekko.stream.connectors.mqtt.javadsl.MqttSource;
+import org.apache.pekko.stream.connectors.s3.javadsl.S3;
+import org.apache.pekko.stream.javadsl.Source;
+import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
+
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+
+public class Main {
+
+ final ActorSystem<Void> system;
+ final Http http;
+
+ public static void main(String[] args) throws Exception {
+ Main me = new Main();
+ me.run();
+ }
+
+ public Main() {
+ system = ActorSystem.create(Behaviors.empty(), "MqttHttpToS3");
+ http = Http.get(toClassic(system));
+ }
+
+ final ObjectMapper mapper = new ObjectMapper().registerModule(new
JavaTimeModule());
+ final ObjectReader downloadCommandReader =
mapper.readerFor(DownloadCommand.class);
+
+ final String mqttBroker = "tcp://localhost:1883";
+ // Remember to set up topic in MQTT server's acl config
+ final String topic = "downloads/trigger";
+ final String s3Bucket = "pekko.connectors.samples";
+
+ private String createS3BucketKey(DownloadCommand info) {
+ return LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
+ + Uri.create(info.url).getPathString().replace("/", "-");
+ }
+
+ void run() throws Exception {
+ final MqttConnectionSettings mqttConnectionSettings =
+ MqttConnectionSettings
+ .create(
+ mqttBroker,
+ "upload-control",
+ new MemoryPersistence()
+ );
+
+ MqttSubscriptions mqttSubscriptions =
+ MqttSubscriptions.create(topic, MqttQoS.atLeastOnce());
+
+ MqttSource
+ .atMostOnce(mqttConnectionSettings, mqttSubscriptions, 8)
+ .map(m -> m.payload().utf8String())
+ .<DownloadCommand>map(downloadCommandReader::readValue)
+ .mapAsync(4, info -> {
+ String s3BucketKey = createS3BucketKey(info);
+ return Source.single(info.url)
+ .map(HttpRequest::GET)
+ .mapAsync(1, http::singleRequest)
+ .flatMapConcat(httpResponse ->
httpResponse.entity().getDataBytes())
+ .runWith(S3.multipartUpload(s3Bucket, s3BucketKey,
ContentTypes.TEXT_HTML_UTF8), system);
+ }
+ )
+ .runForeach(System.out::println, system)
+ .exceptionally(e -> {
+ e.printStackTrace();
+ return Done.done();
+ });
+ }
+
+}
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/PublishDataToMqtt.java
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/PublishDataToMqtt.java
new file mode 100644
index 0000000..21b053b
--- /dev/null
+++
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/java/sample/triggereddownload/PublishDataToMqtt.java
@@ -0,0 +1,57 @@
+package sample.triggereddownload;
+
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.stream.connectors.mqtt.MqttConnectionSettings;
+import org.apache.pekko.stream.connectors.mqtt.MqttMessage;
+import org.apache.pekko.stream.connectors.mqtt.MqttQoS;
+import org.apache.pekko.stream.connectors.mqtt.javadsl.MqttSink;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
+import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
+
+import java.time.Duration;
+import java.time.Instant;
+import java.util.concurrent.CompletionStage;
+
+public class PublishDataToMqtt {
+
+ final ActorSystem<Void> system;
+ final Http http;
+
+ public static void main(String[] args) throws Exception {
+ PublishDataToMqtt me = new PublishDataToMqtt();
+ me.run();
+ }
+
+ public PublishDataToMqtt() {
+ system = ActorSystem.create(Behaviors.empty(), "PublishDataToMqtt");
+ http = Http.get(toClassic(system));
+ }
+
+ final ObjectMapper mapper = new ObjectMapper().registerModule(new
JavaTimeModule());
+ final ObjectWriter downloadCommandWriter =
mapper.writerFor(DownloadCommand.class);
+
+
+ void run() throws Exception {
+
+ final MqttConnectionSettings connectionSettings =
+ MqttConnectionSettings.create(
+ "tcp://localhost:1883", "test-java-client", new
MemoryPersistence());
+ Sink<MqttMessage, CompletionStage<Done>> mqttSink =
+ MqttSink.create(connectionSettings.withClientId("source-test/sink"),
MqttQoS.atLeastOnce());
+
+ DownloadCommand command = new DownloadCommand(Instant.now(),
"https://pekko.apache.org/docs/pekko-connectors/current/s3.html");
+ MqttMessage message = MqttMessage.create("downloads/trigger",
ByteString.fromString(downloadCommandWriter.writeValueAsString(command)));
+
+ Source.tick(Duration.ofSeconds(5), Duration.ofSeconds(30),
message).runWith(mqttSink, system);
+ }
+}
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/application.conf
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/application.conf
index df30fd1..b1f8c3d 100644
---
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/application.conf
+++
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/application.conf
@@ -1,10 +1,10 @@
-akka {
- loggers = ["akka.event.slf4j.Slf4jLogger"]
- logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
+pekko {
+ loggers = ["org.apache.pekko.event.slf4j.Slf4jLogger"]
+ logging-filter = "org.apache.pekko.event.slf4j.Slf4jLoggingFilter"
loglevel = "DEBUG"
}
-alpakka.s3 {
+pekko.connectors.s3 {
aws {
credentials {
# define AWS credentials in credentials.conf
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/credentials.conf-RENAME
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/credentials.conf-RENAME
index de204d2..4bee78c 100644
---
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/credentials.conf-RENAME
+++
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/credentials.conf-RENAME
@@ -1,4 +1,4 @@
-akka.stream.alpakka.s3 {
+pekko.connectors.s3 {
aws {
credentials {
# define AWS credentials in credentials.conf
diff --git
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/logback.xml
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/logback.xml
index e7755d5..81a8f9e 100644
---
a/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/logback.xml
+++
b/pekko-connectors-sample-mqtt-http-to-s3-java/src/main/resources/logback.xml
@@ -6,7 +6,7 @@
</encoder>
</appender>
- <logger name="akka" level="WARN"/>
+ <logger name="org.apache.pekko" level="WARN"/>
<logger name="com.typesafe.sslconfig" level="WARN"/>
<root level="debug">
diff --git a/pekko-connectors-sample-mqtt-to-kafka/README.md
b/pekko-connectors-sample-mqtt-to-kafka/README.md
index 43f49a5..ee4354b 100644
--- a/pekko-connectors-sample-mqtt-to-kafka/README.md
+++ b/pekko-connectors-sample-mqtt-to-kafka/README.md
@@ -2,7 +2,7 @@
## Read from an MQTT topic, group messages and publish to Kafka
-Subscribe to an MQTT topic with @extref[Pekko-Connectors
MQTT](alpakka:/mqtt.html), group a few values and publish the aggregate to a
Kafka topic with @extref[Pekko-Connectors Kafka](pekko-connectors-kafka:).
+Subscribe to an MQTT topic with @extref[Pekko-Connectors
MQTT](pekko-connectors:/mqtt.html), group a few values and publish the
aggregate to a Kafka topic with @extref[Pekko-Connectors
Kafka](pekko-connectors-kafka:).
Browse the sources at
@link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-mqtt-to-kafka)
{ open=new }.
diff --git
a/pekko-connectors-sample-mqtt-to-kafka/docs/src/main/paradox/example.md
b/pekko-connectors-sample-mqtt-to-kafka/docs/src/main/paradox/example.md
index 8088a41..75c55c6 100644
--- a/pekko-connectors-sample-mqtt-to-kafka/docs/src/main/paradox/example.md
+++ b/pekko-connectors-sample-mqtt-to-kafka/docs/src/main/paradox/example.md
@@ -13,7 +13,7 @@ docker-compose.yml
### Restarting of the source
The MQTT source gets wrapped by a `RestartSource` to mitigate the
-@extref:[Paho initial connections problem](alpakka:/mqtt.html#settings).
+@extref:[Paho initial connections
problem](pekko-connectors:/mqtt.html#settings).
Java
: @@snip [snip](/src/main/java/samples/javadsl/Main.java) { #restarting }
diff --git a/pekko-connectors-sample-rotate-logs-to-ftp/README.md
b/pekko-connectors-sample-rotate-logs-to-ftp/README.md
index 5662c01..63b7374 100644
--- a/pekko-connectors-sample-rotate-logs-to-ftp/README.md
+++ b/pekko-connectors-sample-rotate-logs-to-ftp/README.md
@@ -2,7 +2,7 @@
## Rotate data stream over to multiple compressed files on SFTP server
-This example reads a stream of data and uses @extref[Pekko-Connectors
File](alpakka:file.html) `LogRotatorSink` to write multiple files which get
rotated triggered by a rotation function, the files are zipped in-flow and
written to an SFTP server with @extref[Pekko-Connectors FTP](alpakka:ftp.html).
+This example reads a stream of data and uses @extref[Pekko-Connectors
File](pekko-connectors:file.html) `LogRotatorSink` to write multiple files
which get rotated triggered by a rotation function, the files are zipped
in-flow and written to an SFTP server with @extref[Pekko-Connectors
FTP](pekko-connectors:ftp.html).
Browse the sources at
@link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-rotate-logs-to-ftp)
{ open=new }.
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]