This is an automated email from the ASF dual-hosted git repository.

fanningpj pushed a commit to branch http-csv-kafka
in repository 
https://gitbox.apache.org/repos/asf/incubator-pekko-connectors-samples.git

commit 0a94a9c327325ab6890a05829ce5cec029a061af
Author: PJ Fanning <[email protected]>
AuthorDate: Sun Dec 17 17:25:07 2023 +0100

    http-csv-to-kafka migrate
---
 NOTICE                                             |  2 +-
 docs/build.sbt                                     | 15 +++----
 docs/project/Dependencies.scala                    |  4 +-
 pekko-connectors-sample-ftp-to-file/README.md      |  2 +-
 .../README.md                                      |  4 +-
 .../build.sbt                                      |  2 +-
 .../common/src/main/resources/application.conf     |  6 +--
 .../common/src/main/resources/logback.xml          |  3 +-
 .../project/Dependencies.scala                     | 30 +++++++-------
 .../step_001_http_request/README.md                |  4 +-
 .../src/main/java/samples/javadsl/Main.java        | 16 ++++----
 .../src/main/scala/samples/Main.scala              | 18 ++++----
 .../step_002_extract_http_entity/README.md         |  2 +-
 .../src/main/java/samples/javadsl/Main.java        | 26 ++++++------
 .../src/main/scala/samples/Main.scala              | 20 ++++-----
 .../step_003_parse_csv/README.md                   |  2 +-
 .../src/main/java/samples/javadsl/Main.java        | 30 +++++++-------
 .../src/main/scala/samples/Main.scala              | 22 +++++-----
 .../src/main/java/samples/javadsl/Main.java        | 30 +++++++-------
 .../src/main/scala/samples/Main.scala              | 22 +++++-----
 .../step_005_cleanse_lines/README.md               |  2 +-
 .../src/main/java/samples/javadsl/Main.java        | 30 +++++++-------
 .../src/main/scala/samples/Main.scala              | 22 +++++-----
 .../step_006_coordinated_shutdown/README.md        |  2 +-
 .../src/main/java/samples/javadsl/Main.java        | 32 +++++++--------
 .../src/main/scala/samples/Main.scala              | 24 +++++------
 .../step_007_produce_to_kafka/README.md            |  2 +-
 .../src/main/java/samples/javadsl/Main.java        | 44 ++++++++++----------
 .../src/main/scala/samples/Main.scala              | 28 ++++++-------
 .../src/main/java/samples/javadsl/Main.java        | 48 +++++++++++-----------
 .../src/main/scala/samples/Main.scala              | 28 ++++++-------
 .../docs/src/main/paradox/to-http-get.md           |  2 +-
 .../README.md                                      |  2 +-
 .../docs/src/main/paradox/example.md               |  2 +-
 .../project/Dependencies.scala                     |  4 +-
 35 files changed, 264 insertions(+), 268 deletions(-)

diff --git a/NOTICE b/NOTICE
index 6c31a22..8ba10a5 100644
--- a/NOTICE
+++ b/NOTICE
@@ -7,5 +7,5 @@ The Apache Software Foundation (https://www.apache.org/).
 This product contains significant parts that were originally based on software 
from Lightbend (Akka <https://akka.io/>).
 Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 
-Apache Pekko Connectors is derived from Alpakka Samples 4.0.x.
+Apache Pekko Connectors Samples is derived from Alpakka Samples.
 
diff --git a/docs/build.sbt b/docs/build.sbt
index 6f1b6a1..32be3c3 100644
--- a/docs/build.sbt
+++ b/docs/build.sbt
@@ -56,16 +56,13 @@ HttpCsvToKafka / paradoxProperties ++= Map(
   "javadoc.org.apache.pekko.kafka.base_url" -> "",
   "extref.pekko-connectors-kafka.base_url" -> 
s"https://pekko.apache.org/docs/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}/%s";,
   // Pekko
-  "scaladoc.akka.base_url" -> 
s"https://pekko.apache.org/api/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}";,
   "scaladoc.org.apache.pekko.base_url" -> 
s"https://pekko.apache.org/api/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}";,
-  "javadoc.akka.base_url" -> 
s"https://pekko.apache.org/japi/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}";,
   "javadoc.org.apache.pekko.base_url" -> 
s"https://pekko.apache.org/japi/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}";,
-  "extref.akka.base_url" -> 
s"https://pekko.apache.org/docs/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}/%s";,
   "extref.pekko.base_url" -> 
s"https://pekko.apache.org/docs/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}/%s";,
   // Pekko HTTP
-  "scaladoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/api/pekko-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}";,
-  "javadoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/japi/pekko-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}";,
-  "extref.pekko-http.base_url" -> 
s"https://pekko.apache.org/docs/pekko-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}/%s";,
+  "scaladoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/api/pekko-http/${Dependencies.HttpCsvToKafka.PekkoHttpVersion}";,
+  "javadoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/japi/pekko-http/${Dependencies.HttpCsvToKafka.PekkoHttpVersion}";,
+  "extref.pekko-http.base_url" -> 
s"https://pekko.apache.org/docs/pekko-http/${Dependencies.HttpCsvToKafka.PekkoHttpVersion}/%s";,
 )
 HttpCsvToKafka / paradoxGroups := Map("Language" -> Seq("Java", "Scala"))
 
@@ -163,9 +160,9 @@ KafkaToWebsocketClients / paradoxProperties ++= Map(
   "javadoc.org.apache.pekko.base_url" -> 
s"https://pekko.apache.org/japi/pekko/${Dependencies.KafkaToWebsocketClients.PekkoVersion}";,
   "extref.pekko.base_url" -> 
s"https://pekko.apache.org/docs/pekko/${Dependencies.KafkaToWebsocketClients.PekkoVersion}/%s";,
   // Pekko HTTP
-  "scaladoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/api/pekko-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}";,
-  "javadoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/japi/pekko-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}";,
-  "extref.pekko-http.base_url" -> 
s"https://pekko.apache.org/docs/pekko-http/${Dependencies.KafkaToWebsocketClients.AkkaHttpVersion}/%s";,
+  "scaladoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/api/pekko-http/${Dependencies.KafkaToWebsocketClients.PekkoHttpVersion}";,
+  "javadoc.org.apache.pekko.http.base_url" -> 
s"https://pekko.apache.org/japi/pekko-http/${Dependencies.KafkaToWebsocketClients.PekkoHttpVersion}";,
+  "extref.pekko-http.base_url" -> 
s"https://pekko.apache.org/docs/pekko-http/${Dependencies.KafkaToWebsocketClients.PekkoHttpVersion}/%s";,
 )
 KafkaToWebsocketClients / paradoxGroups := Map("Language" -> Seq("Java", 
"Scala"))
 
diff --git a/docs/project/Dependencies.scala b/docs/project/Dependencies.scala
index af5360b..5ae649d 100644
--- a/docs/project/Dependencies.scala
+++ b/docs/project/Dependencies.scala
@@ -29,7 +29,7 @@ object Dependencies {
     val ScalaVersion = versions("scalaVer")
     val ScalaTestVersion = versions("ScalaTestVersion")
     val PekkoVersion = versions("PekkoVersion")
-    val AkkaHttpVersion = versions("AkkaHttpVersion")
+    val PekkoHttpVersion = versions("PekkoHttpVersion")
     val PekkoConnectorsVersion = versions("PekkoConnectorsVersion")
     val PekkoConnectorsKafkaVersion = versions("PekkoConnectorsKafkaVersion")
   }
@@ -89,7 +89,7 @@ object Dependencies {
 
     val ScalaVersion = versions("scalaVer")
     val PekkoVersion = versions("PekkoVersion")
-    val AkkaHttpVersion = versions("AkkaHttpVersion")
+    val PekkoHttpVersion = versions("PekkoHttpVersion")
     val PekkoConnectorsKafkaVersion = versions("PekkoConnectorsKafkaVersion")
   }
 
diff --git a/pekko-connectors-sample-ftp-to-file/README.md 
b/pekko-connectors-sample-ftp-to-file/README.md
index 8392456..178085d 100644
--- a/pekko-connectors-sample-ftp-to-file/README.md
+++ b/pekko-connectors-sample-ftp-to-file/README.md
@@ -2,7 +2,7 @@
 
 ## Download all files from an FTP server to local files
 
-This example uses @extref[Pekko Connectors FTP](pekko-connectors:ftp.html) to 
read from the FTP server, and stores files using Akka Stream `FileIO`.
+This example uses @extref[Pekko Connectors FTP](pekko-connectors:ftp.html) to 
read from the FTP server, and stores files using Pekko Stream `FileIO`.
 
 Browse the sources at 
@link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-ftp-to-file)
 { open=new }.
 
diff --git a/pekko-connectors-sample-http-csv-to-kafka/README.md 
b/pekko-connectors-sample-http-csv-to-kafka/README.md
index 88c5b6e..38b55d7 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/README.md
+++ b/pekko-connectors-sample-http-csv-to-kafka/README.md
@@ -1,8 +1,8 @@
 # Apache Pekko Connectors sample
 
-## Fetch CSV via Akka HTTP and publish the data as JSON to Kafka
+## Fetch CSV via Pekko HTTP and publish the data as JSON to Kafka
 
-This example uses @extref[Akka HTTP to send the HTTP 
request](pekko-http:client-side/connection-level.html#opening-http-connections) 
and Akka HTTPs primary JSON support via @extref[Spray 
JSON](pekko-http:common/json-support.html#spray-json-support) (for Scala) or 
Jackson JSON (for Java) to convert the map into a JSON structure which gets 
published to a Kafka topic.
+This example uses @extref[Pekko HTTP to send the HTTP 
request](pekko-http:client-side/connection-level.html#opening-http-connections) 
and Pekko HTTP's primary JSON support via @extref[Spray 
JSON](pekko-http:common/json-support.html#spray-json-support) (for Scala) or 
Jackson JSON (for Java) to convert the map into a JSON structure which gets 
published to a Kafka topic.
 
 Browse the sources at 
@link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-http-csv-to-kafka)
 { open=new }.
 
diff --git a/pekko-connectors-sample-http-csv-to-kafka/build.sbt 
b/pekko-connectors-sample-http-csv-to-kafka/build.sbt
index 2f21af6..8013348 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/build.sbt
+++ b/pekko-connectors-sample-http-csv-to-kafka/build.sbt
@@ -1,5 +1,5 @@
 
-lazy val alpakka_sample_master = project
+lazy val pekko_connectors_sample_master = project
   .in(file("."))
   .aggregate(
     common,
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/application.conf
 
b/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/application.conf
index f83f9d0..71a28ae 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/application.conf
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/application.conf
@@ -1,6 +1,6 @@
-akka {
-  loggers = ["akka.event.slf4j.Slf4jLogger"]
-  logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
+pekko {
+  loggers = ["org.apache.pekko.event.slf4j.Slf4jLogger"]
+  logging-filter = "org.apache.pekko.event.slf4j.Slf4jLoggingFilter"
   loglevel = "DEBUG"
 }
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/logback.xml
 
b/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/logback.xml
index cb745b2..7c4096b 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/logback.xml
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/common/src/main/resources/logback.xml
@@ -31,8 +31,7 @@
 
     <logger name="org.apache" level="WARN"/>
     <logger name="kafka" level="WARN"/>
-    <logger name="akka" level="WARN"/>
-    <logger name="akka.kafka.benchmarks" level="INFO"/>
+    <logger name="org.apache.pekko" level="WARN"/>
     <logger name="org.apache.kafka.common.utils.AppInfoParser" level="ERROR"/>
     <logger name="org.apache.kafka.clients.NetworkClient" level="ERROR"/>
     <logger name="org.I0Itec.zkclient" level="WARN"/>
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/project/Dependencies.scala 
b/pekko-connectors-sample-http-csv-to-kafka/project/Dependencies.scala
index 5277491..55b4d68 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/project/Dependencies.scala
+++ b/pekko-connectors-sample-http-csv-to-kafka/project/Dependencies.scala
@@ -3,27 +3,27 @@ import sbt._
 object Dependencies {
   val scalaVer = "2.13.12"
   // #dependencies
-  val ScalaTestVersion = "3.1.4"
-  val PekkoVersion = "2.6.19"
-  val AkkaHttpVersion = "10.1.12"
-  val PekkoConnectorsVersion = "4.0.0"
-  val PekkoConnectorsKafkaVersion = "3.0.1"
+  val ScalaTestVersion = "3.2.17"
+  val PekkoVersion = "1.0.2"
+  val PekkoHttpVersion = "1.0.0"
+  val PekkoConnectorsVersion = "1.0.1"
+  val PekkoConnectorsKafkaVersion = "1.0.0"
 
   val dependencies = List(
-    "com.lightbend.akka" %% "akka-stream-alpakka-csv" % PekkoConnectorsVersion,
-    "com.typesafe.akka" %% "akka-stream-kafka" % PekkoConnectorsKafkaVersion,
-    "com.typesafe.akka" %% "akka-actor-typed" % PekkoVersion,
-    "com.typesafe.akka" %% "akka-stream" % PekkoVersion,
-    "com.typesafe.akka" %% "akka-http" % AkkaHttpVersion,
+    "org.apache.pekko" %% "pekko-connectors-csv" % PekkoConnectorsVersion,
+    "org.apache.pekko" %% "pekko-connectors-kafka" % 
PekkoConnectorsKafkaVersion,
+    "org.apache.pekko" %% "pekko-actor-typed" % PekkoVersion,
+    "org.apache.pekko" %% "pekko-stream" % PekkoVersion,
+    "org.apache.pekko" %% "pekko-http" % PekkoHttpVersion,
     // Used from Scala
-    "com.typesafe.akka" %% "akka-http-spray-json" % AkkaHttpVersion,
+    "org.apache.pekko" %% "pekko-http-spray-json" % PekkoHttpVersion,
     // Used from Java
-    "com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % "2.11.4",
-    "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.11.4",
+    "com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % "2.14.3",
+    "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.14.3",
 
-    "org.testcontainers" % "kafka" % "1.14.3",
+    "org.testcontainers" % "kafka" % "1.17.3",
     
-    "com.typesafe.akka" %% "akka-slf4j" % PekkoVersion,
+    "org.apache.pekko" %% "pekko-slf4j" % PekkoVersion,
     "ch.qos.logback" % "logback-classic" % "1.2.13"
   )
   // #dependencies
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/README.md 
b/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/README.md
index 2fd10dc..879fe9a 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/README.md
+++ b/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/README.md
@@ -2,12 +2,12 @@
 
 ### Description
 
-This code uses Akka HTTP to request a file containing listed companies from 
the NASDAQ web site.
+This code uses Pekko HTTP to request a file containing listed companies from 
the NASDAQ web site.
 
 It starts the Actor System, imports the Actor System's dispatcher as 
`ExecutionContext`, and gets a stream materializer from the Actor System.
 
 The HTTP request is created as value (it will be sent multiple times in later 
steps) and sets a specific HTTP request header.
 
-The request is run in an Akka Stream from the single value, issuing the 
request by Akka HTTP, and printing out the HTTP response.
+The request is run in a Pekko Stream from the single value, issuing the 
request by Pekko HTTP, and printing out the HTTP response.
 
 Once the stream completes, the Actor System is terminated and the program 
exits.
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/java/samples/javadsl/Main.java
index 26a6a47..87ca0f3 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/java/samples/javadsl/Main.java
@@ -4,14 +4,14 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.ActorSystem;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.headers.Accept;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
 
 import java.util.Collections;
 import java.util.concurrent.CompletionStage;
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/scala/samples/Main.scala
index be7ab9a..07de110 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_001_http_request/src/main/scala/samples/Main.scala
@@ -4,15 +4,15 @@
 
 package samples
 
-import akka.Done
-import akka.actor._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.stream._
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.stream._
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.util.ByteString
 
 import scala.concurrent.Future
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/README.md
 
b/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/README.md
index ad43dd3..f60841c 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/README.md
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/README.md
@@ -2,4 +2,4 @@
 
 ### Description
 
-The HTTP response with status OK is expected and the contained HTTP entity is 
extracted. Instead of the HTTP response, the contained entity (page content) 
continues in the stream in the form of 
@scaladoc:[ByteString](akka.util.ByteString) elements.
+The HTTP response with status OK is expected and the contained HTTP entity is 
extracted. Instead of the HTTP response, the contained entity (page content) 
continues in the stream in the form of 
@scaladoc:[ByteString](org.apache.pekko.util.ByteString) elements.
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/java/samples/javadsl/Main.java
index d9e9207..75b8172 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/java/samples/javadsl/Main.java
@@ -4,23 +4,23 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.HttpResponse;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.StatusCodes;
-import akka.http.javadsl.model.headers.Accept;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.HttpResponse;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.StatusCodes;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
 
 import java.util.Collections;
 import java.util.concurrent.CompletionStage;
 
-import static akka.actor.typed.javadsl.Adapter.toClassic;
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
 
 public class Main {
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/scala/samples/Main.scala
index 52fd128..a2f059a 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_002_extract_http_entity/src/main/scala/samples/Main.scala
@@ -4,16 +4,16 @@
 
 package samples
 
-import akka.Done
-import akka.actor.typed.ActorSystem
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor.typed.ActorSystem
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.util.ByteString
 
 import scala.concurrent.Future
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md 
b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md
index cfc76cc..9783b6c 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md
+++ b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/README.md
@@ -2,4 +2,4 @@
 
 ### Description
 
-The binary data in @scaladoc:[ByteString](akka.util.ByteString)s is passed 
into @extref:[Pekko-Connectors 
CSV](pekko-connectors:data-transformations/csv.html) to be parsed and converted 
per line into a Map. The stream elements becomes a @scala[`Map[String, 
ByteString]`]@java[`Map<String, ByteString>`], one entry per column using the 
column headers as keys.
+The binary data in @scaladoc:[ByteString](org.apache.pekko.util.ByteString)s 
is passed into @extref:[Pekko-Connectors 
CSV](pekko-connectors:data-transformations/csv.html) to be parsed and converted 
per line into a Map. The stream elements becomes a @scala[`Map[String, 
ByteString]`]@java[`Map<String, ByteString>`], one entry per column using the 
column headers as keys.
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/java/samples/javadsl/Main.java
index 52e7fa6..f0f58a5 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/java/samples/javadsl/Main.java
@@ -4,26 +4,26 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.HttpResponse;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.StatusCodes;
-import akka.http.javadsl.model.headers.Accept;
-import akka.stream.alpakka.csv.javadsl.CsvParsing;
-import akka.stream.alpakka.csv.javadsl.CsvToMap;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.HttpResponse;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.StatusCodes;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvParsing;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvToMap;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
 
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.concurrent.CompletionStage;
 
-import static akka.actor.typed.javadsl.Adapter.toClassic;
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
 
 public class Main {
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/scala/samples/Main.scala
index f0e3811..7c1465f 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_003_parse_csv/src/main/scala/samples/Main.scala
@@ -4,17 +4,17 @@
 
 package samples
 
-import akka.Done
-import akka.actor.typed.ActorSystem
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.stream.alpakka.csv.scaladsl.{ CsvParsing, CsvToMap }
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor.typed.ActorSystem
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.stream.connectors.csv.scaladsl.{ CsvParsing, CsvToMap }
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.util.ByteString
 
 import scala.concurrent.Future
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/java/samples/javadsl/Main.java
index 2d54750..2ee28e9 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/java/samples/javadsl/Main.java
@@ -4,20 +4,20 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.HttpResponse;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.StatusCodes;
-import akka.http.javadsl.model.headers.Accept;
-import akka.stream.alpakka.csv.javadsl.CsvParsing;
-import akka.stream.alpakka.csv.javadsl.CsvToMap;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.HttpResponse;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.StatusCodes;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvParsing;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvToMap;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 
@@ -28,7 +28,7 @@ import java.util.Collections;
 import java.util.Map;
 import java.util.concurrent.CompletionStage;
 
-import static akka.actor.typed.javadsl.Adapter.toClassic;
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
 
 public class Main {
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/scala/samples/Main.scala
index 869fccc..15f5344 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_004_producing_json/src/main/scala/samples/Main.scala
@@ -4,17 +4,17 @@
 
 package samples
 
-import akka.Done
-import akka.actor.typed.ActorSystem
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.stream.alpakka.csv.scaladsl.{ CsvParsing, CsvToMap }
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor.typed.ActorSystem
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.stream.connectors.csv.scaladsl.{ CsvParsing, CsvToMap }
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.util.ByteString
 import spray.json.{ DefaultJsonProtocol, JsValue, JsonWriter }
 
 import scala.concurrent.Future
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/README.md 
b/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/README.md
index 25f58ee..330921b 100644
--- a/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/README.md
+++ b/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/README.md
@@ -2,4 +2,4 @@
 
 ### Description
 
-The CSV data we receive happens to contain an empty column without a header. 
The `cleanseCsvData` removes that column and turns the column values from 
@scaladoc:[ByteString](akka.util.ByteString)s into regular `String`s.
+The CSV data we receive happens to contain an empty column without a header. 
The `cleanseCsvData` removes that column and turns the column values from 
@scaladoc:[ByteString](org.apache.pekko.util.ByteString)s into regular 
`String`s.
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/java/samples/javadsl/Main.java
index 6bc7588..db5e066 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/java/samples/javadsl/Main.java
@@ -4,20 +4,20 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.HttpResponse;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.StatusCodes;
-import akka.http.javadsl.model.headers.Accept;
-import akka.stream.alpakka.csv.javadsl.CsvParsing;
-import akka.stream.alpakka.csv.javadsl.CsvToMap;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.HttpResponse;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.StatusCodes;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvParsing;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvToMap;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 
@@ -28,7 +28,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletionStage;
 
-import static akka.actor.typed.javadsl.Adapter.toClassic;
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
 
 public class Main {
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/scala/samples/Main.scala
index 49abce9..b8a6ab5 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_005_cleanse_lines/src/main/scala/samples/Main.scala
@@ -4,17 +4,17 @@
 
 package samples
 
-import akka.Done
-import akka.actor.typed.ActorSystem
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.stream.alpakka.csv.scaladsl.{ CsvParsing, CsvToMap }
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor.typed.ActorSystem
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.stream.connectors.csv.scaladsl.{ CsvParsing, CsvToMap }
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.util.ByteString
 import spray.json.{ DefaultJsonProtocol, JsValue, JsonWriter }
 
 import scala.concurrent.Future
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
 
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
index 91b9fec..d2c8025 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/README.md
@@ -2,4 +2,4 @@
 
 ### Description
 
-To make sure Akka HTTP is shut down in a proper way it is added to Akka's 
@extref:[Coordinated shutdown](pekko:actors.html#coordinated-shutdown).
+To make sure Pekko HTTP is shut down in a proper way, it is added to Pekko's 
@extref:[Coordinated shutdown](pekko:actors.html#coordinated-shutdown).
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/java/samples/javadsl/Main.java
index 0b446c8..ba46e46 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/java/samples/javadsl/Main.java
@@ -4,21 +4,21 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.actor.CoordinatedShutdown;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.HttpResponse;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.StatusCodes;
-import akka.http.javadsl.model.headers.Accept;
-import akka.stream.alpakka.csv.javadsl.CsvParsing;
-import akka.stream.alpakka.csv.javadsl.CsvToMap;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.actor.CoordinatedShutdown;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.HttpResponse;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.StatusCodes;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvParsing;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvToMap;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 
@@ -29,7 +29,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletionStage;
 
-import static akka.actor.typed.javadsl.Adapter.toClassic;
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
 
 public class Main {
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/scala/samples/Main.scala
index 606692a..70c01ff 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_006_coordinated_shutdown/src/main/scala/samples/Main.scala
@@ -4,18 +4,18 @@
 
 package samples
 
-import akka.Done
-import akka.actor.CoordinatedShutdown
-import akka.actor.typed.ActorSystem
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.stream.alpakka.csv.scaladsl.{ CsvParsing, CsvToMap }
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor.CoordinatedShutdown
+import org.apache.pekko.actor.typed.ActorSystem
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.stream.connectors.csv.scaladsl.{ CsvParsing, CsvToMap }
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.util.ByteString
 import spray.json.{ DefaultJsonProtocol, JsValue, JsonWriter }
 
 import scala.concurrent.Future
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/README.md 
b/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/README.md
index d220156..1d3225d 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/README.md
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/README.md
@@ -6,4 +6,4 @@
 
 @extref:[Pekko-Connectors Kafka](pekko-connectors-kafka:producer.html) 
producer settings specify the broker address and the data types for Kafka's key 
and value.
 
-@scaladoc:[Producer.plainSink](akka.kafka.scaladsl.Producer$) sends the 
`ProducerRecord`s stream elements to the specified Kafka topic.
+@scaladoc:[Producer.plainSink](org.apache.pekko.kafka.scaladsl.Producer$) 
sends the stream's `ProducerRecord` elements to the specified Kafka topic.
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/java/samples/javadsl/Main.java
index e6d6e9c..9e04675 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/java/samples/javadsl/Main.java
@@ -4,27 +4,27 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.actor.CoordinatedShutdown;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.HttpResponse;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.StatusCodes;
-import akka.http.javadsl.model.headers.Accept;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.ProducerSettings;
-import akka.kafka.Subscriptions;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
-import akka.stream.alpakka.csv.javadsl.CsvParsing;
-import akka.stream.alpakka.csv.javadsl.CsvToMap;
-import akka.stream.javadsl.Keep;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.actor.CoordinatedShutdown;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.HttpResponse;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.StatusCodes;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.ProducerSettings;
+import org.apache.pekko.kafka.Subscriptions;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvParsing;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvToMap;
+import org.apache.pekko.stream.javadsl.Keep;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -41,7 +41,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletionStage;
 
-import static akka.actor.typed.javadsl.Adapter.toClassic;
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
 
 public class Main {
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/scala/samples/Main.scala
index acb7c42..a356806 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_007_produce_to_kafka/src/main/scala/samples/Main.scala
@@ -4,20 +4,20 @@
 
 package samples
 
-import akka.Done
-import akka.actor.CoordinatedShutdown
-import akka.actor.typed.ActorSystem
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.kafka.scaladsl.{ Consumer, Producer }
-import akka.kafka.{ ConsumerSettings, ProducerSettings, Subscriptions }
-import akka.stream.alpakka.csv.scaladsl.{ CsvParsing, CsvToMap }
-import akka.stream.scaladsl.{ Keep, Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor.CoordinatedShutdown
+import org.apache.pekko.actor.typed.ActorSystem
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer }
+import org.apache.pekko.kafka.{ ConsumerSettings, ProducerSettings, 
Subscriptions }
+import org.apache.pekko.stream.connectors.csv.scaladsl.{ CsvParsing, CsvToMap }
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.util.ByteString
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.serialization.{ StringDeserializer, 
StringSerializer }
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/java/samples/javadsl/Main.java
 
b/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/java/samples/javadsl/Main.java
index 935a60e..230c375 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/java/samples/javadsl/Main.java
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/java/samples/javadsl/Main.java
@@ -4,29 +4,29 @@
 
 package samples.javadsl;
 
-import akka.Done;
-import akka.actor.Cancellable;
-import akka.actor.CoordinatedShutdown;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.http.javadsl.Http;
-import akka.http.javadsl.model.HttpRequest;
-import akka.http.javadsl.model.HttpResponse;
-import akka.http.javadsl.model.MediaRanges;
-import akka.http.javadsl.model.StatusCodes;
-import akka.http.javadsl.model.headers.Accept;
-import akka.japi.Pair;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.ProducerSettings;
-import akka.kafka.Subscriptions;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
-import akka.stream.alpakka.csv.javadsl.CsvParsing;
-import akka.stream.alpakka.csv.javadsl.CsvToMap;
-import akka.stream.javadsl.Keep;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.util.ByteString;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.Cancellable;
+import org.apache.pekko.actor.CoordinatedShutdown;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.http.javadsl.Http;
+import org.apache.pekko.http.javadsl.model.HttpRequest;
+import org.apache.pekko.http.javadsl.model.HttpResponse;
+import org.apache.pekko.http.javadsl.model.MediaRanges;
+import org.apache.pekko.http.javadsl.model.StatusCodes;
+import org.apache.pekko.http.javadsl.model.headers.Accept;
+import org.apache.pekko.japi.Pair;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.ProducerSettings;
+import org.apache.pekko.kafka.Subscriptions;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvParsing;
+import org.apache.pekko.stream.connectors.csv.javadsl.CsvToMap;
+import org.apache.pekko.stream.javadsl.Keep;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.util.ByteString;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -45,7 +45,7 @@ import java.util.Map;
 import java.util.concurrent.CompletionStage;
 import java.util.concurrent.TimeUnit;
 
-import static akka.actor.typed.javadsl.Adapter.toClassic;
+import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;
 
 public class Main {
 
diff --git 
a/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/scala/samples/Main.scala
 
b/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/scala/samples/Main.scala
index 7297628..d76af96 100644
--- 
a/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/scala/samples/Main.scala
+++ 
b/pekko-connectors-sample-http-csv-to-kafka/step_008_scheduled_download/src/main/scala/samples/Main.scala
@@ -6,20 +6,20 @@ package samples
 
 import java.util.concurrent.TimeUnit
 
-import akka.Done
-import akka.actor.{ CoordinatedShutdown, Cancellable }
-import akka.actor.typed.ActorSystem
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.http.scaladsl._
-import akka.http.scaladsl.model.StatusCodes._
-import akka.http.scaladsl.model.headers.Accept
-import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
-import akka.kafka.scaladsl.{ Consumer, Producer }
-import akka.kafka.{ ConsumerSettings, ProducerSettings, Subscriptions }
-import akka.stream.alpakka.csv.scaladsl.{ CsvParsing, CsvToMap }
-import akka.stream.scaladsl.{ Keep, Sink, Source }
-import akka.util.ByteString
+import org.apache.pekko.Done
+import org.apache.pekko.actor.{ CoordinatedShutdown, Cancellable }
+import org.apache.pekko.actor.typed.ActorSystem
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.http.scaladsl._
+import org.apache.pekko.http.scaladsl.model.StatusCodes._
+import org.apache.pekko.http.scaladsl.model.headers.Accept
+import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, 
MediaRanges }
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer }
+import org.apache.pekko.kafka.{ ConsumerSettings, ProducerSettings, 
Subscriptions }
+import org.apache.pekko.stream.connectors.csv.scaladsl.{ CsvParsing, CsvToMap }
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.util.ByteString
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.serialization.{ StringDeserializer, 
StringSerializer }
diff --git a/pekko-connectors-sample-jms/docs/src/main/paradox/to-http-get.md 
b/pekko-connectors-sample-jms/docs/src/main/paradox/to-http-get.md
index 8800b16..d25325b 100644
--- a/pekko-connectors-sample-jms/docs/src/main/paradox/to-http-get.md
+++ b/pekko-connectors-sample-jms/docs/src/main/paradox/to-http-get.md
@@ -3,7 +3,7 @@
 - listens to the JMS queue "test" receiving `String`s (1),
 - converts incoming data to `org.apache.pekko.util.ByteString` (2),
 - puts the received text into an `HttpRequest` (3),
-- sends the created request via Akka Http (4),
+- sends the created request via Pekko HTTP (4),
 - prints the `HttpResponse` to standard out (5).
 
 Scala
diff --git a/pekko-connectors-sample-kafka-to-websocket-clients/README.md 
b/pekko-connectors-sample-kafka-to-websocket-clients/README.md
index 6ccf074..ba6f199 100644
--- a/pekko-connectors-sample-kafka-to-websocket-clients/README.md
+++ b/pekko-connectors-sample-kafka-to-websocket-clients/README.md
@@ -2,7 +2,7 @@
 
 ## Read from a Kafka topic and push the data to connected websocket clients
 
-Clients may connect via websockets and will receive data read from a Kafka 
topic. The websockets are implemented in @extref[Akka HTTP](pekko-http:) and 
[Pekko-Connectors Kafka](pekko-connectors-kafka:) subscribes to the Kafka topic.
+Clients may connect via websockets and will receive data read from a Kafka 
topic. The websockets are implemented in @extref[Pekko HTTP](pekko-http:) and 
@extref[Pekko-Connectors Kafka](pekko-connectors-kafka:) subscribes to the Kafka topic.
 
 Browse the sources at 
@link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-kafka-to-websocket-clients)
 { open=new }.
 
diff --git 
a/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
 
b/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
index ab84142..929d098 100644
--- 
a/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
+++ 
b/pekko-connectors-sample-kafka-to-websocket-clients/docs/src/main/paradox/example.md
@@ -16,7 +16,7 @@ This `websocketHandler` is a `Flow` which will be used when a 
websocket client c
 Java
 : @@snip [snip](/src/main/java/samples/javadsl/Main.java) { #websocket-handler 
}
 
-### Akka HTTP routes
+### Pekko HTTP routes
 
 This example code uses two routes
 * `/events` which opens a websocket to subscribe to the messages from the 
Kafka topic
diff --git 
a/pekko-connectors-sample-kafka-to-websocket-clients/project/Dependencies.scala 
b/pekko-connectors-sample-kafka-to-websocket-clients/project/Dependencies.scala
index 3e56eb4..fe5a4b3 100644
--- 
a/pekko-connectors-sample-kafka-to-websocket-clients/project/Dependencies.scala
+++ 
b/pekko-connectors-sample-kafka-to-websocket-clients/project/Dependencies.scala
@@ -4,13 +4,13 @@ object Dependencies {
   val scalaVer = "2.13.12"
   // #deps
   val PekkoVersion = "2.6.19"
-  val AkkaHttpVersion = "10.1.12"
+  val PekkoHttpVersion = "10.1.12"
   val PekkoConnectorsKafkaVersion = "3.0.1"
   // #deps
   val dependencies = List(
     // #deps
     "com.typesafe.akka" %% "akka-stream" % PekkoVersion,
-    "com.typesafe.akka" %% "akka-http" % AkkaHttpVersion,
+    "com.typesafe.akka" %% "akka-http" % PekkoHttpVersion,
     "com.typesafe.akka" %% "akka-stream-kafka" % PekkoConnectorsKafkaVersion,
 
     // Logging


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to