[kafka] branch trunk updated (7ec10ce19a -> 8380d2edf4)
This is an automated email from the ASF dual-hosted git repository. guozhang pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/kafka.git from 7ec10ce19a HOTFIX: fix PriorityQueue iteration to assign warmups in priority order (#12585) add 8380d2edf4 KAFKA-10199: Handle exceptions from state updater (#12519) No new revisions were added by this update. Summary of changes: .../processor/internals/DefaultStateUpdater.java | 10 ++ .../streams/processor/internals/TaskManager.java | 191 + .../kafka/streams/processor/internals/Tasks.java | 53 ++ .../streams/processor/internals/TasksRegistry.java | 6 +- .../processor/internals/TaskManagerTest.java | 95 +- 5 files changed, 232 insertions(+), 123 deletions(-)
[kafka] branch abort-previous-builds-pr updated (259c396aa8 -> 4cb5e745ea)
This is an automated email from the ASF dual-hosted git repository. ijuma pushed a change to branch abort-previous-builds-pr in repository https://gitbox.apache.org/repos/asf/kafka.git from 259c396aa8 Tweak add 4cb5e745ea Fix No new revisions were added by this update. Summary of changes: Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
[kafka] branch abort-previous-builds-pr updated (4f8cc93d25 -> 259c396aa8)
This is an automated email from the ASF dual-hosted git repository. ijuma pushed a change to branch abort-previous-builds-pr in repository https://gitbox.apache.org/repos/asf/kafka.git from 4f8cc93d25 MINOR: Enable "abort previous builds" for PRs add 259c396aa8 Tweak No new revisions were added by this update. Summary of changes: Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
[kafka] 01/01: MINOR: Enable "abort previous builds" for PRs
This is an automated email from the ASF dual-hosted git repository. ijuma pushed a commit to branch abort-previous-builds-pr in repository https://gitbox.apache.org/repos/asf/kafka.git commit 4f8cc93d258d725b1fd2df06b93f474dc8fdc414 Author: Ismael Juma AuthorDate: Fri Sep 2 15:12:11 2022 -0700 MINOR: Enable "abort previous builds" for PRs --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index e1b4c5b3a1..00c5bb8e94 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -96,7 +96,7 @@ pipeline { agent none options { -disableConcurrentBuilds() +disableConcurrentBuilds(abortPrevious: changeRequest()) } stages {
[kafka] branch abort-previous-builds-pr created (now 4f8cc93d25)
This is an automated email from the ASF dual-hosted git repository. ijuma pushed a change to branch abort-previous-builds-pr in repository https://gitbox.apache.org/repos/asf/kafka.git at 4f8cc93d25 MINOR: Enable "abort previous builds" for PRs This branch includes the following new commits: new 4f8cc93d25 MINOR: Enable "abort previous builds" for PRs The 1 revision listed above as "new" is entirely new to this repository and will be described in a separate email. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[kafka] annotated tag 3.1.2-rc0 updated (f8c67dc3ae -> 20312e60b8)
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a change to annotated tag 3.1.2-rc0 in repository https://gitbox.apache.org/repos/asf/kafka.git *** WARNING: tag 3.1.2-rc0 was modified! *** from f8c67dc3ae (commit) to 20312e60b8 (tag) tagging f8c67dc3ae0a3265855e98897255dd16a78a0452 (commit) replaces 3.1.0 by Mickael Maison on Fri Sep 2 18:29:07 2022 +0200 - Log - 3.1.2-rc0 --- No new revisions were added by this update. Summary of changes:
[kafka] 02/04: MINOR: Add configurable max receive size for SASL authentication requests
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.1 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 50cb087e03e878555144b1c41d1d0b84f640ac90 Author: Manikumar Reddy AuthorDate: Mon May 16 19:25:02 2022 +0530 MINOR: Add configurable max receive size for SASL authentication requests This adds a new configuration `sasl.server.max.receive.size` that sets the maximum receive size for requests before and during authentication. Reviewers: Tom Bentley , Mickael Maison Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 2 + .../config/internals/BrokerSecurityConfigs.java| 6 +++ .../authenticator/SaslServerAuthenticator.java | 16 ++-- .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 ++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +-- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 ++ .../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + 8 files changed, 77 insertions(+), 7 deletions(-) diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index 43ccfebad9..2e5e00813d 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -32,6 +32,8 @@ files="(Fetcher|Sender|SenderTest|ConsumerCoordinator|KafkaConsumer|KafkaProducer|Utils|TransactionManager|TransactionManagerTest|KafkaAdminClient|NetworkClient|Admin|KafkaRaftClient|KafkaRaftClientTest|RaftClientTestContext).java"/> + { - invocation.getArgument(0).putInt(SaslServerAuthenticator.MAX_RECEIVE_SIZE + 1); + invocation.getArgument(0).putInt(BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE + 1); return 4; }); -assertThrows(InvalidReceiveException.class, authenticator::authenticate); +assertThrows(SaslAuthenticationException.class, authenticator::authenticate); verify(transportLayer).read(any(ByteBuffer.class)); } diff --git a/core/src/main/scala/kafka/server/KafkaConfig.scala 
b/core/src/main/scala/kafka/server/KafkaConfig.scala index 15c5d09502..09b2bc5dea 100755 --- a/core/src/main/scala/kafka/server/KafkaConfig.scala +++ b/core/src/main/scala/kafka/server/KafkaConfig.scala @@ -255,6 +255,7 @@ object Defaults { /** * General Security configuration ***/ val ConnectionsMaxReauthMsDefault = 0L + val DefaultServerMaxMaxReceiveSize = BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE val DefaultPrincipalSerde = classOf[DefaultKafkaPrincipalBuilder] /** * Sasl configuration ***/ @@ -559,6 +560,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassProp = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_CONFIG val ConnectionsMaxReauthMsProp = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS + val SaslServerMaxReceiveSizeProp = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_CONFIG val securityProviderClassProp = SecurityConfig.SECURITY_PROVIDERS_CONFIG /** * SSL Configuration / @@ -987,6 +989,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassDoc = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_DOC val ConnectionsMaxReauthMsDoc = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS_DOC + val SaslServerMaxReceiveSizeDoc = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_DOC val securityProviderClassDoc = SecurityConfig.SECURITY_PROVIDERS_DOC /** * SSL Configuration / @@ -1291,6 +1294,7 @@ object KafkaConfig { /** * General Security Configuration / .define(ConnectionsMaxReauthMsProp, LONG, Defaults.ConnectionsMaxReauthMsDefault, MEDIUM, ConnectionsMaxReauthMsDoc) + .define(SaslServerMaxReceiveSizeProp, INT, Defaults.DefaultServerMaxMaxReceiveSize, MEDIUM, SaslServerMaxReceiveSizeDoc) .define(securityProviderClassProp, STRING, null, LOW, securityProviderClassDoc) /** * SSL Configuration / diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala index 2ce336e4ca..946effe782 100755 --- 
a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala @@ -944,6 +944,8 @@ class KafkaConfigTest { case KafkaConfig.KafkaMetricsReporterClassesProp => // ignore case KafkaConfig.KafkaMetricsPollingIntervalSecondsProp => //ignore +case KafkaConfig.SaslServerMaxReceiveSizeProp => assertPropertyInvalid(baseProperties, name, "not_a_number") + // Raft Quorum Configs case
[kafka] branch 3.1 updated (0717f5b231 -> 29067a8418)
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a change to branch 3.1 in repository https://gitbox.apache.org/repos/asf/kafka.git from 0717f5b231 MINOR: Add note on IDEMPOTENT_WRITE ACL to notable changes (#12260) new c129566276 MINOR: Add more validation during KRPC deserialization new 50cb087e03 MINOR: Add configurable max receive size for SASL authentication requests new 32ffdd7fc8 MINOR: Bump version in upgrade guide to 3.1.2 new 29067a8418 MINOR: Update LICENSE-binary The 4 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: LICENSE-binary | 20 +-- checkstyle/suppressions.xml| 6 + .../config/internals/BrokerSecurityConfigs.java| 6 + .../kafka/common/protocol/ByteBufferAccessor.java | 9 +- .../common/protocol/DataInputStreamReadable.java | 139 - .../org/apache/kafka/common/protocol/Readable.java | 8 +- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../authenticator/SaslServerAuthenticator.java | 16 ++- .../common/message/SimpleArraysMessageTest.java| 54 .../common/protocol/ByteBufferAccessorTest.java| 58 + .../kafka/common/record/DefaultRecordTest.java | 14 +++ .../kafka/common/requests/RequestContextTest.java | 83 .../kafka/common/requests/RequestResponseTest.java | 92 ++ .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 +++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +- .../common/message/SimpleArraysMessage.json} | 15 ++- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 + .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- .../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + docs/upgrade.html | 2 +- .../apache/kafka/message/MessageDataGenerator.java | 9 +- 
.../kafka/raft/internals/RecordsIterator.java | 105 +++- .../apache/kafka/raft/internals/StringSerde.java | 3 +- 25 files changed, 502 insertions(+), 211 deletions(-) delete mode 100644 clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java create mode 100644 clients/src/test/java/org/apache/kafka/common/message/SimpleArraysMessageTest.java create mode 100644 clients/src/test/java/org/apache/kafka/common/protocol/ByteBufferAccessorTest.java copy clients/src/{main/resources/common/message/ResponseHeader.json => test/resources/common/message/SimpleArraysMessage.json} (71%)
[kafka] 03/04: MINOR: Bump version in upgrade guide to 3.1.2
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.1 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 32ffdd7fc8e2e16dc210706778926fcb68441524 Author: Mickael Maison AuthorDate: Fri Sep 2 17:05:49 2022 +0200 MINOR: Bump version in upgrade guide to 3.1.2 --- docs/upgrade.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/upgrade.html b/docs/upgrade.html index d6c07306d3..ddf1924f92 100644 --- a/docs/upgrade.html +++ b/docs/upgrade.html @@ -19,7 +19,7 @@
[kafka] 04/04: MINOR: Update LICENSE-binary
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.1 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 29067a84181f3cd5c1e58203ac454c7510daed56 Author: Mickael Maison AuthorDate: Fri Sep 2 17:17:30 2022 +0200 MINOR: Update LICENSE-binary --- LICENSE-binary | 20 ++-- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 7a15a1fca6..a92e0b5128 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -219,16 +219,16 @@ jackson-module-jaxb-annotations-2.12.6 jackson-module-scala_2.13-2.12.6 jakarta.validation-api-2.0.2 javassist-3.27.0-GA -jetty-client-9.4.44.v20210927 -jetty-continuation-9.4.44.v20210927 -jetty-http-9.4.44.v20210927 -jetty-io-9.4.44.v20210927 -jetty-security-9.4.44.v20210927 -jetty-server-9.4.44.v20210927 -jetty-servlet-9.4.44.v20210927 -jetty-servlets-9.4.44.v20210927 -jetty-util-9.4.44.v20210927 -jetty-util-ajax-9.4.44.v20210927 +jetty-client-9.4.48.v20220622 +jetty-continuation-9.4.48.v20220622 +jetty-http-9.4.48.v20220622 +jetty-io-9.4.48.v20220622 +jetty-security-9.4.48.v20220622 +jetty-server-9.4.48.v20220622 +jetty-servlet-9.4.48.v20220622 +jetty-servlets-9.4.48.v20220622 +jetty-util-9.4.48.v20220622 +jetty-util-ajax-9.4.48.v20220622 jersey-common-2.34 jersey-server-2.34 jose4j-0.7.8
[kafka] 01/04: MINOR: Add more validation during KRPC deserialization
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.1 in repository https://gitbox.apache.org/repos/asf/kafka.git commit c1295662768e64b4467e27c3d5158f95f2307657 Author: Colin Patrick McCabe AuthorDate: Fri May 20 15:23:12 2022 -0700 MINOR: Add more validation during KRPC deserialization When deserializing KRPC (which is used for RPCs sent to Kafka, Kafka Metadata records, and some other things), check that we have at least N bytes remaining before allocating an array of size N. Remove DataInputStreamReadable since it was hard to make this class aware of how many bytes were remaining. Instead, when reading an individual record in the Raft layer, simply create a ByteBufferAccessor with a ByteBuffer containing just the bytes we're interested in. Add SimpleArraysMessageTest and ByteBufferAccessorTest. Also add some additional tests in RequestResponseTest. Reviewers: Tom Bentley , Mickael Maison , Colin McCabe Co-authored-by: Colin McCabe Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 4 + .../kafka/common/protocol/ByteBufferAccessor.java | 9 +- .../common/protocol/DataInputStreamReadable.java | 139 - .../org/apache/kafka/common/protocol/Readable.java | 8 +- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../common/message/SimpleArraysMessageTest.java| 54 .../common/protocol/ByteBufferAccessorTest.java| 58 + .../kafka/common/record/DefaultRecordTest.java | 14 +++ .../kafka/common/requests/RequestContextTest.java | 83 .../kafka/common/requests/RequestResponseTest.java | 92 ++ .../common/message/SimpleArraysMessage.json| 29 + .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- .../apache/kafka/message/MessageDataGenerator.java | 9 +- .../kafka/raft/internals/RecordsIterator.java | 105 +++- .../apache/kafka/raft/internals/StringSerde.java | 3 +- 16 files changed, 434 insertions(+), 187 deletions(-) diff --git 
a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index c1f9b340dd..43ccfebad9 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -150,6 +150,10 @@ + + + diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java index bd0925d6db..f643f5b577 100644 --- a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java +++ b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java @@ -54,8 +54,15 @@ public class ByteBufferAccessor implements Readable, Writable { } @Override -public void readArray(byte[] arr) { +public byte[] readArray(int size) { +int remaining = buf.remaining(); +if (size > remaining) { +throw new RuntimeException("Error reading byte array of " + size + " byte(s): only " + remaining + +" byte(s) available"); +} +byte[] arr = new byte[size]; buf.get(arr); +return arr; } @Override diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java b/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java deleted file mode 100644 index 70ed52d6a0..00 --- a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - *http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.kafka.common.protocol; - -import org.apache.kafka.common.utils.ByteUtils; - -import java.io.Closeable; -import java.io.DataInputStream; -import java.io.IOException; -import java.nio.ByteBuffer; - -public class DataInputStreamReadable implements Readable, Closeable { -protected final DataInputStream input; - -public DataInputStreamReadable(DataInputStream input) { -
[kafka] annotated tag 2.8.2-rc0 updated (3146c6ff4a -> 950a7a9348)
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a change to annotated tag 2.8.2-rc0 in repository https://gitbox.apache.org/repos/asf/kafka.git *** WARNING: tag 2.8.2-rc0 was modified! *** from 3146c6ff4a (commit) to 950a7a9348 (tag) tagging 3146c6ff4a24cc24d6039037e99edc8930d1958a (commit) replaces 2.8.1 by Manikumar Reddy on Fri Sep 2 20:18:27 2022 +0530 - Log - 2.8.2-rc0 --- No new revisions were added by this update. Summary of changes:
[kafka] annotated tag 3.0.2-rc0 updated (25b1aea02e -> c8d0ffaac1)
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a change to annotated tag 3.0.2-rc0 in repository https://gitbox.apache.org/repos/asf/kafka.git *** WARNING: tag 3.0.2-rc0 was modified! *** from 25b1aea02e (commit) to c8d0ffaac1 (tag) tagging 25b1aea02e37da1457026f6ac82a66f81c878e45 (commit) replaces 3.0.1 by Tom Bentley on Fri Sep 2 15:41:37 2022 +0100 - Log - 3.0.2-rc0 --- No new revisions were added by this update. Summary of changes:
[kafka] annotated tag 3.2.2-rc0 updated (38c22ad893 -> a661970322)
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a change to annotated tag 3.2.2-rc0 in repository https://gitbox.apache.org/repos/asf/kafka.git *** WARNING: tag 3.2.2-rc0 was modified! *** from 38c22ad893 (commit) to a661970322 (tag) tagging 38c22ad893fb6cf52b0e139e8db62320f8580ac2 (commit) replaces 3.2.1 by Mickael Maison on Fri Sep 2 16:04:00 2022 +0200 - Log - 3.2.2-rc0 --- No new revisions were added by this update. Summary of changes:
[kafka] 01/06: MINOR: Add more validation during KRPC deserialization
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a commit to branch 3.0 in repository https://gitbox.apache.org/repos/asf/kafka.git commit aaceb6b79bfcb1d32874ccdbc8f3138d1c1c00fb Author: Colin Patrick McCabe AuthorDate: Fri May 20 15:23:12 2022 -0700 MINOR: Add more validation during KRPC deserialization When deserializing KRPC (which is used for RPCs sent to Kafka, Kafka Metadata records, and some other things), check that we have at least N bytes remaining before allocating an array of size N. Remove DataInputStreamReadable since it was hard to make this class aware of how many bytes were remaining. Instead, when reading an individual record in the Raft layer, simply create a ByteBufferAccessor with a ByteBuffer containing just the bytes we're interested in. Add SimpleArraysMessageTest and ByteBufferAccessorTest. Also add some additional tests in RequestResponseTest. Reviewers: Tom Bentley , Mickael Maison , Colin McCabe Co-authored-by: Colin McCabe Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 4 + .../kafka/common/protocol/ByteBufferAccessor.java | 9 +- .../common/protocol/DataInputStreamReadable.java | 139 - .../org/apache/kafka/common/protocol/Readable.java | 8 +- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../common/message/SimpleArraysMessageTest.java| 54 .../common/protocol/ByteBufferAccessorTest.java| 58 + .../kafka/common/record/DefaultRecordTest.java | 14 +++ .../kafka/common/requests/RequestContextTest.java | 83 .../kafka/common/requests/RequestResponseTest.java | 92 ++ .../common/message/SimpleArraysMessage.json| 29 + .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- .../apache/kafka/message/MessageDataGenerator.java | 9 +- .../kafka/raft/internals/RecordsIterator.java | 105 +++- .../apache/kafka/raft/internals/StringSerde.java | 3 +- 16 files changed, 434 insertions(+), 187 deletions(-) diff 
--git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index b7a7192754..69c9374f5b 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -150,6 +150,10 @@ + + + diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java index bd0925d6db..f643f5b577 100644 --- a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java +++ b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java @@ -54,8 +54,15 @@ public class ByteBufferAccessor implements Readable, Writable { } @Override -public void readArray(byte[] arr) { +public byte[] readArray(int size) { +int remaining = buf.remaining(); +if (size > remaining) { +throw new RuntimeException("Error reading byte array of " + size + " byte(s): only " + remaining + +" byte(s) available"); +} +byte[] arr = new byte[size]; buf.get(arr); +return arr; } @Override diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java b/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java deleted file mode 100644 index 70ed52d6a0..00 --- a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - *http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.kafka.common.protocol; - -import org.apache.kafka.common.utils.ByteUtils; - -import java.io.Closeable; -import java.io.DataInputStream; -import java.io.IOException; -import java.nio.ByteBuffer; - -public class DataInputStreamReadable implements Readable, Closeable { -protected final DataInputStream input; - -public DataInputStreamReadable(DataInputStream input) { -
[kafka] 06/06: KAFKA-10712; Update release scripts to Python3 (#11538)
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a commit to branch 3.0 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 114280878709833410981c986bc4f76a34a008af Author: David Jacot AuthorDate: Mon Nov 29 10:42:14 2021 +0100 KAFKA-10712; Update release scripts to Python3 (#11538) Reviewers: Mickael Maison --- release.py | 49 - release_notes.py | 29 +++-- 2 files changed, 39 insertions(+), 39 deletions(-) diff --git a/release.py b/release.py index f43e244b0e..456d60cd56 100755 --- a/release.py +++ b/release.py @@ -51,8 +51,6 @@ release.py release-email """ -from __future__ import print_function - import datetime from getpass import getpass import json @@ -98,25 +96,25 @@ def print_output(output): print(">", line) def cmd(action, cmd_arg, *args, **kwargs): -if isinstance(cmd_arg, basestring) and not kwargs.get("shell", False): +if isinstance(cmd_arg, str) and not kwargs.get("shell", False): cmd_arg = cmd_arg.split() allow_failure = kwargs.pop("allow_failure", False) num_retries = kwargs.pop("num_retries", 0) stdin_log = "" -if "stdin" in kwargs and isinstance(kwargs["stdin"], basestring): +if "stdin" in kwargs and isinstance(kwargs["stdin"], str): stdin_log = "--> " + kwargs["stdin"] stdin = tempfile.TemporaryFile() -stdin.write(kwargs["stdin"]) +stdin.write(kwargs["stdin"].encode('utf-8')) stdin.seek(0) kwargs["stdin"] = stdin print(action, cmd_arg, stdin_log) try: output = subprocess.check_output(cmd_arg, *args, stderr=subprocess.STDOUT, **kwargs) -print_output(output) +print_output(output.decode('utf-8')) except subprocess.CalledProcessError as e: -print_output(e.output) +print_output(e.output.decode('utf-8')) if num_retries > 0: kwargs['num_retries'] = num_retries - 1 @@ -136,9 +134,9 @@ def cmd(action, cmd_arg, *args, **kwargs): def cmd_output(cmd, *args, **kwargs): -if isinstance(cmd, basestring): +if isinstance(cmd, str): cmd = cmd.split() -return subprocess.check_output(cmd, *args, 
stderr=subprocess.STDOUT, **kwargs) +return subprocess.check_output(cmd, *args, stderr=subprocess.STDOUT, **kwargs).decode('utf-8') def replace(path, pattern, replacement): updated = [] @@ -161,7 +159,7 @@ def regexReplace(path, pattern, replacement): f.write(line) def user_ok(msg): -ok = raw_input(msg) +ok = input(msg) return ok.strip().lower() == 'y' def sftp_mkdir(dir): @@ -204,13 +202,14 @@ def get_jdk(prefs, version): """ Get settings for the specified JDK version. """ -jdk_java_home = get_pref(prefs, 'jdk%d' % version, lambda: raw_input("Enter the path for JAVA_HOME for a JDK%d compiler (blank to use default JAVA_HOME): " % version)) +jdk_java_home = get_pref(prefs, 'jdk%d' % version, lambda: input("Enter the path for JAVA_HOME for a JDK%d compiler (blank to use default JAVA_HOME): " % version)) jdk_env = dict(os.environ) if jdk_java_home.strip() else None if jdk_env is not None: jdk_env['JAVA_HOME'] = jdk_java_home -javaVersion = cmd_output("%s/bin/java -version" % jdk_java_home, env=jdk_env) -if version == 8 and "1.8.0" not in javaVersion: - fail("JDK 8 is required") -elif "%d.0" % version not in javaVersion: +java_version = cmd_output("%s/bin/java -version" % jdk_java_home, env=jdk_env) +if version == 8: + if "1.8.0" not in java_version: +fail("JDK 8 is required") +elif "%d.0" % version not in java_version and '"%d"' % version not in java_version: fail("JDK %s is required" % version) return jdk_env @@ -271,7 +270,7 @@ def command_stage_docs(): versioned_docs_path = os.path.join(kafka_site_repo_path, docs_version(version)) if not os.path.exists(versioned_docs_path): -os.mkdir(versioned_docs_path, 0755) +os.mkdir(versioned_docs_path, 755) # The contents of the docs jar are site-docs/. 
We need to get rid of the site-docs prefix and dump everything # inside it into the docs version subdirectory in the kafka-site repo @@ -309,16 +308,16 @@ def command_release_announcement_email(): release_tags = sorted([t for t in tags if re.match(release_tag_pattern, t)]) release_version_num = release_tags[-1] if not user_ok("""Is the current release %s ? (y/n): """ % release_version_num): -release_version_num = raw_input('What is the current release version:') +release_version_num = input('What is the current release version:') validate_release_num(release_version_num) previous_release_version_num = release_tags[-2] if not user_ok("""Is the previous release %s ? (y/n): """ % previous_release_version_num): -
[kafka] 03/06: MINOR: Update version to 3.0.2
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a commit to branch 3.0 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 17f695c4c2526c43893c566bb7d61a5103276b31 Author: Tom Bentley AuthorDate: Fri Sep 2 10:45:14 2022 +0100 MINOR: Update version to 3.0.2 --- docs/js/templateData.js| 2 +- gradle.properties | 2 +- streams/quickstart/java/pom.xml| 2 +- streams/quickstart/java/src/main/resources/archetype-resources/pom.xml | 2 +- streams/quickstart/pom.xml | 2 +- tests/kafkatest/__init__.py| 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/js/templateData.js b/docs/js/templateData.js index 05541483dd..2609b50d6a 100644 --- a/docs/js/templateData.js +++ b/docs/js/templateData.js @@ -19,6 +19,6 @@ limitations under the License. var context={ "version": "30", "dotVersion": "3.0", -"fullDotVersion": "3.0.2-SNAPSHOT", +"fullDotVersion": "3.0.2", "scalaVersion": "2.13" }; diff --git a/gradle.properties b/gradle.properties index fc4d1bf2c1..329399cac9 100644 --- a/gradle.properties +++ b/gradle.properties @@ -20,7 +20,7 @@ group=org.apache.kafka # - tests/kafkatest/__init__.py # - tests/kafkatest/version.py (variable DEV_VERSION) # - kafka-merge-pr.py -version=3.0.2-SNAPSHOT +version=3.0.2 scalaVersion=2.13.6 task=build org.gradle.jvmargs=-Xmx2g -Xss4m -XX:+UseParallelGC diff --git a/streams/quickstart/java/pom.xml b/streams/quickstart/java/pom.xml index 6109a2be47..ce84f4f66a 100644 --- a/streams/quickstart/java/pom.xml +++ b/streams/quickstart/java/pom.xml @@ -26,7 +26,7 @@ org.apache.kafka streams-quickstart -3.0.2-SNAPSHOT +3.0.2 .. 
diff --git a/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml b/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml index 9415c364a0..e55003ede4 100644 --- a/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml +++ b/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml @@ -29,7 +29,7 @@ UTF-8 -3.0.2-SNAPSHOT +3.0.2 1.7.7 1.2.17 diff --git a/streams/quickstart/pom.xml b/streams/quickstart/pom.xml index 2c640549f1..f481c79052 100644 --- a/streams/quickstart/pom.xml +++ b/streams/quickstart/pom.xml @@ -22,7 +22,7 @@ org.apache.kafka streams-quickstart pom -3.0.2-SNAPSHOT +3.0.2 Kafka Streams :: Quickstart diff --git a/tests/kafkatest/__init__.py b/tests/kafkatest/__init__.py index 488e24d583..c8674298aa 100644 --- a/tests/kafkatest/__init__.py +++ b/tests/kafkatest/__init__.py @@ -22,4 +22,4 @@ # Instead, in development branches, the version should have a suffix of the form ".devN" # # For example, when Kafka is at version 1.0.0-SNAPSHOT, this should be something like "1.0.0.dev0" -__version__ = '3.0.2.dev0' +__version__ = '3.0.2'
[kafka] 05/06: MINOR: Update LICENSE-binary
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a commit to branch 3.0 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 286eceea7de742a35a4f0a6a979c704561e2039d Author: Tom Bentley AuthorDate: Fri Sep 2 12:21:43 2022 +0100 MINOR: Update LICENSE-binary --- LICENSE-binary | 20 ++-- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 057b88f0b2..34c456ed0b 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -219,16 +219,16 @@ jackson-module-jaxb-annotations-2.12.6 jackson-module-scala_2.13-2.12.6 jakarta.validation-api-2.0.2 javassist-3.27.0-GA -jetty-client-9.4.44.v20210927 -jetty-continuation-9.4.44.v20210927 -jetty-http-9.4.44.v20210927 -jetty-io-9.4.44.v20210927 -jetty-security-9.4.44.v20210927 -jetty-server-9.4.44.v20210927 -jetty-servlet-9.4.44.v20210927 -jetty-servlets-9.4.44.v20210927 -jetty-util-9.4.44.v20210927 -jetty-util-ajax-9.4.44.v20210927 +jetty-client-9.4.48.v20220622 +jetty-continuation-9.4.48.v20220622 +jetty-http-9.4.48.v20220622 +jetty-io-9.4.48.v20220622 +jetty-security-9.4.48.v20220622 +jetty-server-9.4.48.v20220622 +jetty-servlet-9.4.48.v20220622 +jetty-servlets-9.4.48.v20220622 +jetty-util-9.4.48.v20220622 +jetty-util-ajax-9.4.48.v20220622 jersey-common-2.34 jersey-server-2.34 log4j-1.2.17
[kafka] 04/06: MINOR: Update docs/upgrade.html
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a commit to branch 3.0 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 110a640f76fcf27625913ee0764be0d32f4dbb98 Author: Tom Bentley AuthorDate: Fri Sep 2 11:08:24 2022 +0100 MINOR: Update docs/upgrade.html --- docs/upgrade.html | 55 +++ 1 file changed, 55 insertions(+) diff --git a/docs/upgrade.html b/docs/upgrade.html index b13ae76252..b14f01dd1d 100644 --- a/docs/upgrade.html +++ b/docs/upgrade.html @@ -19,6 +19,61 @@
[kafka] 02/06: MINOR: Add configurable max receive size for SASL authentication requests
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a commit to branch 3.0 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 65a1e0451fc1012258582166732bba701331add1 Author: Manikumar Reddy AuthorDate: Mon May 16 19:25:02 2022 +0530 MINOR: Add configurable max receive size for SASL authentication requests This adds a new configuration `sasl.server.max.receive.size` that sets the maximum receive size for requests before and during authentication. Reviewers: Tom Bentley , Mickael Maison Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 2 + .../config/internals/BrokerSecurityConfigs.java| 6 +++ .../authenticator/SaslServerAuthenticator.java | 16 ++-- .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 ++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +-- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 ++ .../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + 8 files changed, 77 insertions(+), 7 deletions(-) diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index 69c9374f5b..d47545432a 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -32,6 +32,8 @@ files="(Fetcher|Sender|SenderTest|ConsumerCoordinator|KafkaConsumer|KafkaProducer|Utils|TransactionManager|TransactionManagerTest|KafkaAdminClient|NetworkClient|Admin|KafkaRaftClient|KafkaRaftClientTest|RaftClientTestContext).java"/> + { - invocation.getArgument(0).putInt(SaslServerAuthenticator.MAX_RECEIVE_SIZE + 1); + invocation.getArgument(0).putInt(BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE + 1); return 4; }); -assertThrows(InvalidReceiveException.class, authenticator::authenticate); +assertThrows(SaslAuthenticationException.class, authenticator::authenticate); verify(transportLayer).read(any(ByteBuffer.class)); } diff --git a/core/src/main/scala/kafka/server/KafkaConfig.scala 
b/core/src/main/scala/kafka/server/KafkaConfig.scala index c556d7ab81..35d6f5b90a 100755 --- a/core/src/main/scala/kafka/server/KafkaConfig.scala +++ b/core/src/main/scala/kafka/server/KafkaConfig.scala @@ -253,6 +253,7 @@ object Defaults { /** * General Security configuration ***/ val ConnectionsMaxReauthMsDefault = 0L + val DefaultServerMaxMaxReceiveSize = BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE val DefaultPrincipalSerde = classOf[DefaultKafkaPrincipalBuilder] /** * Sasl configuration ***/ @@ -549,6 +550,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassProp = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_CONFIG val ConnectionsMaxReauthMsProp = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS + val SaslServerMaxReceiveSizeProp = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_CONFIG val securityProviderClassProp = SecurityConfig.SECURITY_PROVIDERS_CONFIG /** * SSL Configuration / @@ -960,6 +962,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassDoc = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_DOC val ConnectionsMaxReauthMsDoc = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS_DOC + val SaslServerMaxReceiveSizeDoc = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_DOC val securityProviderClassDoc = SecurityConfig.SECURITY_PROVIDERS_DOC /** * SSL Configuration / @@ -1249,6 +1252,7 @@ object KafkaConfig { /** * General Security Configuration / .define(ConnectionsMaxReauthMsProp, LONG, Defaults.ConnectionsMaxReauthMsDefault, MEDIUM, ConnectionsMaxReauthMsDoc) + .define(SaslServerMaxReceiveSizeProp, INT, Defaults.DefaultServerMaxMaxReceiveSize, MEDIUM, SaslServerMaxReceiveSizeDoc) .define(securityProviderClassProp, STRING, null, LOW, securityProviderClassDoc) /** * SSL Configuration / diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala index 2e38df00f1..a736bf4357 100755 --- 
a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala @@ -791,6 +791,8 @@ class KafkaConfigTest { case KafkaConfig.KafkaMetricsReporterClassesProp => // ignore case KafkaConfig.KafkaMetricsPollingIntervalSecondsProp => //ignore +case KafkaConfig.SaslServerMaxReceiveSizeProp => assertPropertyInvalid(baseProperties, name, "not_a_number") + // Raft Quorum Configs case
[kafka] branch 3.0 updated (967b89f786 -> 1142808787)
This is an automated email from the ASF dual-hosted git repository. tombentley pushed a change to branch 3.0 in repository https://gitbox.apache.org/repos/asf/kafka.git from 967b89f786 MINOR: Add note on IDEMPOTENT_WRITE ACL to notable changes (#12260) new aaceb6b79b MINOR: Add more validation during KRPC deserialization new 65a1e0451f MINOR: Add configurable max receive size for SASL authentication requests new 17f695c4c2 MINOR: Update version to 3.0.2 new 110a640f76 MINOR: Update docs/upgrade.html new 286eceea7d MINOR: Update LICENSE-binary new 1142808787 KAFKA-10712; Update release scripts to Python3 (#11538) The 6 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: LICENSE-binary | 20 +-- checkstyle/suppressions.xml| 6 + .../config/internals/BrokerSecurityConfigs.java| 6 + .../kafka/common/protocol/ByteBufferAccessor.java | 9 +- .../common/protocol/DataInputStreamReadable.java | 139 - .../org/apache/kafka/common/protocol/Readable.java | 8 +- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../authenticator/SaslServerAuthenticator.java | 16 ++- .../common/message/SimpleArraysMessageTest.java| 54 .../common/protocol/ByteBufferAccessorTest.java| 58 + .../kafka/common/record/DefaultRecordTest.java | 14 +++ .../kafka/common/requests/RequestContextTest.java | 83 .../kafka/common/requests/RequestResponseTest.java | 92 ++ .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 +++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +- .../common/message/SimpleArraysMessage.json} | 15 ++- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 + .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- .../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + 
docs/js/templateData.js| 2 +- docs/upgrade.html | 55 .../apache/kafka/message/MessageDataGenerator.java | 9 +- gradle.properties | 2 +- .../kafka/raft/internals/RecordsIterator.java | 105 +++- .../apache/kafka/raft/internals/StringSerde.java | 3 +- release.py | 49 release_notes.py | 29 ++--- streams/quickstart/java/pom.xml| 2 +- .../src/main/resources/archetype-resources/pom.xml | 2 +- streams/quickstart/pom.xml | 2 +- tests/kafkatest/__init__.py| 2 +- 33 files changed, 601 insertions(+), 255 deletions(-) delete mode 100644 clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java create mode 100644 clients/src/test/java/org/apache/kafka/common/message/SimpleArraysMessageTest.java create mode 100644 clients/src/test/java/org/apache/kafka/common/protocol/ByteBufferAccessorTest.java copy clients/src/{main/resources/common/message/ResponseHeader.json => test/resources/common/message/SimpleArraysMessage.json} (71%)
[kafka] 05/05: MINOR: Update LICENSE-binary
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.2 in repository https://gitbox.apache.org/repos/asf/kafka.git commit d7398e619f40c4505a430675c22ffad81890311c Author: Mickael Maison AuthorDate: Fri Sep 2 13:17:39 2022 +0200 MINOR: Update LICENSE-binary --- LICENSE-binary | 20 ++-- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index c2400694f2..ed3e0f2ac7 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -220,16 +220,16 @@ jackson-module-paranamer-2.10.5 jackson-module-scala_2.13-2.12.6 jakarta.validation-api-2.0.2 javassist-3.27.0-GA -jetty-client-9.4.44.v20210927 -jetty-continuation-9.4.44.v20210927 -jetty-http-9.4.44.v20210927 -jetty-io-9.4.44.v20210927 -jetty-security-9.4.44.v20210927 -jetty-server-9.4.44.v20210927 -jetty-servlet-9.4.44.v20210927 -jetty-servlets-9.4.44.v20210927 -jetty-util-9.4.44.v20210927 -jetty-util-ajax-9.4.44.v20210927 +jetty-client-9.4.48.v20220622 +jetty-continuation-9.4.48.v20220622 +jetty-http-9.4.48.v20220622 +jetty-io-9.4.48.v20220622 +jetty-security-9.4.48.v20220622 +jetty-server-9.4.48.v20220622 +jetty-servlet-9.4.48.v20220622 +jetty-servlets-9.4.48.v20220622 +jetty-util-9.4.48.v20220622 +jetty-util-ajax-9.4.48.v20220622 jersey-common-2.34 jersey-server-2.34 jose4j-0.7.9
[kafka] 04/05: MINOR: Align Scala version to 2.13.8
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.2 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 16c49bdd814e6aaa58b79ec0da48e5b62c98eef4 Author: Mickael Maison AuthorDate: Fri Sep 2 13:17:14 2022 +0200 MINOR: Align Scala version to 2.13.8 --- bin/kafka-run-class.sh | 2 +- bin/windows/kafka-run-class.bat | 2 +- gradle.properties | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bin/kafka-run-class.sh b/bin/kafka-run-class.sh index 490f930b8c..a9096ea021 100755 --- a/bin/kafka-run-class.sh +++ b/bin/kafka-run-class.sh @@ -48,7 +48,7 @@ should_include_file() { base_dir=$(dirname $0)/.. if [ -z "$SCALA_VERSION" ]; then - SCALA_VERSION=2.13.6 + SCALA_VERSION=2.13.8 if [[ -f "$base_dir/gradle.properties" ]]; then SCALA_VERSION=`grep "^scalaVersion=" "$base_dir/gradle.properties" | cut -d= -f 2` fi diff --git a/bin/windows/kafka-run-class.bat b/bin/windows/kafka-run-class.bat index 26ef84a4f5..df1e20ba11 100755 --- a/bin/windows/kafka-run-class.bat +++ b/bin/windows/kafka-run-class.bat @@ -27,7 +27,7 @@ set BASE_DIR=%CD% popd IF ["%SCALA_VERSION%"] EQU [""] ( - set SCALA_VERSION=2.13.6 + set SCALA_VERSION=2.13.8 ) IF ["%SCALA_BINARY_VERSION%"] EQU [""] ( diff --git a/gradle.properties b/gradle.properties index 6d9a874d75..a501109fc8 100644 --- a/gradle.properties +++ b/gradle.properties @@ -21,7 +21,7 @@ group=org.apache.kafka # - tests/kafkatest/version.py (variable DEV_VERSION) # - kafka-merge-pr.py version=3.2.2 -scalaVersion=2.13.6 +scalaVersion=2.13.8 task=build org.gradle.jvmargs=-Xmx2g -Xss4m -XX:+UseParallelGC org.gradle.parallel=true
[kafka] 01/05: MINOR: Add more validation during KRPC deserialization
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.2 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 2bfa24b2bd416e7b8c4a0c566b984c43904fdecb Author: Colin Patrick McCabe AuthorDate: Fri May 20 15:23:12 2022 -0700 MINOR: Add more validation during KRPC deserialization When deserializing KRPC (which is used for RPCs sent to Kafka, Kafka Metadata records, and some other things), check that we have at least N bytes remaining before allocating an array of size N. Remove DataInputStreamReadable since it was hard to make this class aware of how many bytes were remaining. Instead, when reading an individual record in the Raft layer, simply create a ByteBufferAccessor with a ByteBuffer containing just the bytes we're interested in. Add SimpleArraysMessageTest and ByteBufferAccessorTest. Also add some additional tests in RequestResponseTest. Reviewers: Tom Bentley , Mickael Maison , Colin McCabe Co-authored-by: Colin McCabe Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 4 + .../kafka/common/protocol/ByteBufferAccessor.java | 9 +- .../common/protocol/DataInputStreamReadable.java | 139 - .../org/apache/kafka/common/protocol/Readable.java | 8 +- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../common/message/SimpleArraysMessageTest.java| 54 .../common/protocol/ByteBufferAccessorTest.java| 58 + .../kafka/common/record/DefaultRecordTest.java | 14 +++ .../kafka/common/requests/RequestContextTest.java | 83 .../kafka/common/requests/RequestResponseTest.java | 91 ++ .../common/message/SimpleArraysMessage.json| 29 + .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- .../apache/kafka/message/MessageDataGenerator.java | 9 +- .../kafka/raft/internals/RecordsIterator.java | 105 +++- .../apache/kafka/raft/internals/StringSerde.java | 3 +- 16 files changed, 433 insertions(+), 187 deletions(-) diff --git 
a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index cd82efe142..2d53e7706b 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -168,6 +168,10 @@ + + + diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java index bd0925d6db..f643f5b577 100644 --- a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java +++ b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java @@ -54,8 +54,15 @@ public class ByteBufferAccessor implements Readable, Writable { } @Override -public void readArray(byte[] arr) { +public byte[] readArray(int size) { +int remaining = buf.remaining(); +if (size > remaining) { +throw new RuntimeException("Error reading byte array of " + size + " byte(s): only " + remaining + +" byte(s) available"); +} +byte[] arr = new byte[size]; buf.get(arr); +return arr; } @Override diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java b/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java deleted file mode 100644 index 70ed52d6a0..00 --- a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - *http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.kafka.common.protocol; - -import org.apache.kafka.common.utils.ByteUtils; - -import java.io.Closeable; -import java.io.DataInputStream; -import java.io.IOException; -import java.nio.ByteBuffer; - -public class DataInputStreamReadable implements Readable, Closeable { -protected final DataInputStream input; - -public DataInputStreamReadable(DataInputStream input) { -
[kafka] 02/05: MINOR: Add configurable max receive size for SASL authentication requests
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.2 in repository https://gitbox.apache.org/repos/asf/kafka.git commit e86512aafdf6e8167b27c2a4249d7896f4645742 Author: Manikumar Reddy AuthorDate: Mon May 16 19:25:02 2022 +0530 MINOR: Add configurable max receive size for SASL authentication requests This adds a new configuration `sasl.server.max.receive.size` that sets the maximum receive size for requests before and during authentication. Reviewers: Tom Bentley , Mickael Maison Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 2 + .../config/internals/BrokerSecurityConfigs.java| 6 +++ .../authenticator/SaslServerAuthenticator.java | 16 ++-- .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 ++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +-- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 ++ .../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + 8 files changed, 77 insertions(+), 7 deletions(-) diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index 2d53e7706b..7266938cda 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -47,6 +47,8 @@ files="(Fetcher|Sender|SenderTest|ConsumerCoordinator|KafkaConsumer|KafkaProducer|Utils|TransactionManager|TransactionManagerTest|KafkaAdminClient|NetworkClient|Admin|KafkaRaftClient|KafkaRaftClientTest|RaftClientTestContext).java"/> + { - invocation.getArgument(0).putInt(SaslServerAuthenticator.MAX_RECEIVE_SIZE + 1); + invocation.getArgument(0).putInt(BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE + 1); return 4; }); -assertThrows(InvalidReceiveException.class, authenticator::authenticate); +assertThrows(SaslAuthenticationException.class, authenticator::authenticate); verify(transportLayer).read(any(ByteBuffer.class)); } diff --git a/core/src/main/scala/kafka/server/KafkaConfig.scala 
b/core/src/main/scala/kafka/server/KafkaConfig.scala index be2c8e72b9..4884259fcd 100755 --- a/core/src/main/scala/kafka/server/KafkaConfig.scala +++ b/core/src/main/scala/kafka/server/KafkaConfig.scala @@ -255,6 +255,7 @@ object Defaults { /** * General Security configuration ***/ val ConnectionsMaxReauthMsDefault = 0L + val DefaultServerMaxMaxReceiveSize = BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE val DefaultPrincipalSerde = classOf[DefaultKafkaPrincipalBuilder] /** * Sasl configuration ***/ @@ -559,6 +560,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassProp = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_CONFIG val ConnectionsMaxReauthMsProp = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS + val SaslServerMaxReceiveSizeProp = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_CONFIG val securityProviderClassProp = SecurityConfig.SECURITY_PROVIDERS_CONFIG /** * SSL Configuration / @@ -992,6 +994,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassDoc = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_DOC val ConnectionsMaxReauthMsDoc = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS_DOC + val SaslServerMaxReceiveSizeDoc = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_DOC val securityProviderClassDoc = SecurityConfig.SECURITY_PROVIDERS_DOC /** * SSL Configuration / @@ -1296,6 +1299,7 @@ object KafkaConfig { /** * General Security Configuration / .define(ConnectionsMaxReauthMsProp, LONG, Defaults.ConnectionsMaxReauthMsDefault, MEDIUM, ConnectionsMaxReauthMsDoc) + .define(SaslServerMaxReceiveSizeProp, INT, Defaults.DefaultServerMaxMaxReceiveSize, MEDIUM, SaslServerMaxReceiveSizeDoc) .define(securityProviderClassProp, STRING, null, LOW, securityProviderClassDoc) /** * SSL Configuration / diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala index a6597d8815..9362735d91 100755 --- 
a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala @@ -944,6 +944,8 @@ class KafkaConfigTest { case KafkaConfig.KafkaMetricsReporterClassesProp => // ignore case KafkaConfig.KafkaMetricsPollingIntervalSecondsProp => //ignore +case KafkaConfig.SaslServerMaxReceiveSizeProp => assertPropertyInvalid(baseProperties, name, "not_a_number") + // Raft Quorum Configs case
[kafka] branch 3.2 updated (2e229db62d -> d7398e619f)
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a change to branch 3.2 in repository https://gitbox.apache.org/repos/asf/kafka.git from 2e229db62d KAFKA-14122: Fix flaky test DynamicBrokerReconfigurationTest#testKeyStoreAlter (#12452) new 2bfa24b2bd MINOR: Add more validation during KRPC deserialization new e86512aafd MINOR: Add configurable max receive size for SASL authentication requests new d14db1be58 MINOR: Bump version in upgrade guide to 3.2.2 new 16c49bdd81 MINOR: Align Scala version to 2.13.8 new d7398e619f MINOR: Update LICENSE-binary The 5 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: LICENSE-binary | 20 +-- bin/kafka-run-class.sh | 2 +- bin/windows/kafka-run-class.bat| 2 +- checkstyle/suppressions.xml| 6 + .../config/internals/BrokerSecurityConfigs.java| 6 + .../kafka/common/protocol/ByteBufferAccessor.java | 9 +- .../common/protocol/DataInputStreamReadable.java | 139 - .../org/apache/kafka/common/protocol/Readable.java | 8 +- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../authenticator/SaslServerAuthenticator.java | 16 ++- .../common/message/SimpleArraysMessageTest.java| 54 .../common/protocol/ByteBufferAccessorTest.java| 58 + .../kafka/common/record/DefaultRecordTest.java | 14 +++ .../kafka/common/requests/RequestContextTest.java | 83 .../kafka/common/requests/RequestResponseTest.java | 91 ++ .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 +++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +- ...ecordsMessage.json => SimpleArraysMessage.json} | 15 ++- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 + .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- 
.../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + docs/upgrade.html | 2 +- .../apache/kafka/message/MessageDataGenerator.java | 9 +- gradle.properties | 2 +- .../kafka/raft/internals/RecordsIterator.java | 105 +++- .../apache/kafka/raft/internals/StringSerde.java | 3 +- 28 files changed, 504 insertions(+), 214 deletions(-) delete mode 100644 clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java create mode 100644 clients/src/test/java/org/apache/kafka/common/message/SimpleArraysMessageTest.java create mode 100644 clients/src/test/java/org/apache/kafka/common/protocol/ByteBufferAccessorTest.java copy clients/src/test/resources/common/message/{SimpleRecordsMessage.json => SimpleArraysMessage.json} (71%)
[kafka] 03/05: MINOR: Bump version in upgrade guide to 3.2.2
This is an automated email from the ASF dual-hosted git repository. mimaison pushed a commit to branch 3.2 in repository https://gitbox.apache.org/repos/asf/kafka.git commit d14db1be58612ddfcf2668ccdc2752f077ec4929 Author: Mickael Maison AuthorDate: Fri Sep 2 12:59:13 2022 +0200 MINOR: Bump version in upgrade guide to 3.2.2 --- docs/upgrade.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/upgrade.html b/docs/upgrade.html index 38f45b7193..c656a575b2 100644 --- a/docs/upgrade.html +++ b/docs/upgrade.html @@ -19,7 +19,7 @@
[kafka] branch 3.3 updated: HOTFIX: fix PriorityQueue iteration to assign warmups in priority order (#12585)
This is an automated email from the ASF dual-hosted git repository. showuon pushed a commit to branch 3.3 in repository https://gitbox.apache.org/repos/asf/kafka.git The following commit(s) were added to refs/heads/3.3 by this push: new 387bcf4d80 HOTFIX: fix PriorityQueue iteration to assign warmups in priority order (#12585) 387bcf4d80 is described below commit 387bcf4d80779270a99be9e056ddc0ae3a369044 Author: A. Sophie Blee-Goldman AuthorDate: Fri Sep 2 03:14:34 2022 -0700 HOTFIX: fix PriorityQueue iteration to assign warmups in priority order (#12585) Based on a patch submitted to the confluentinc fork & then abandoned. Needed some updates and minor expansion but more or less just re-applied the changes proposed in confluentinc#697. Original PR has a very detailed justification for these changes but the tl;dr of it is that apparently the PriorityQueue's iterator does not actually guarantee to return elements in priority order. Reviewer: Luke Chen --- .../streams/processor/internals/assignment/TaskMovement.java | 6 -- .../processor/internals/assignment/TaskMovementTest.java | 11 +++ 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java index 38e64276ba..ec0fa5e11e 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java @@ -107,7 +107,8 @@ final class TaskMovement { final int movementsNeeded = taskMovements.size(); -for (final TaskMovement movement : taskMovements) { +while (!taskMovements.isEmpty()) { +final TaskMovement movement = taskMovements.poll(); // Attempt to find a caught up standby, otherwise find any caught up client, failing that use the most // caught up client. 
final boolean moved = tryToSwapStandbyAndActiveOnCaughtUpClient(clientStates, caughtUpClientsByTaskLoad, movement) || @@ -157,7 +158,8 @@ final class TaskMovement { int movementsNeeded = 0; -for (final TaskMovement movement : taskMovements) { +while (!taskMovements.isEmpty()) { +final TaskMovement movement = taskMovements.poll(); final Function eligibleClientPredicate = clientId -> !clientStates.get(clientId).hasAssignedTask(movement.task); UUID sourceClient = caughtUpClientsByTaskLoad.poll( diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java index baf6d18496..a337deb801 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java @@ -244,15 +244,10 @@ public class TaskMovementTest { assertThat(client2, hasProperty("activeTasks", ClientState::activeTasks, mkSet(TASK_0_2))); assertThat(client3, hasProperty("activeTasks", ClientState::activeTasks, mkSet(TASK_0_1))); -// we should only assign one warmup, but it could be either one that needs to be migrated. 
+// we should only assign one warmup, and the task movement should have the highest priority assertThat(client1, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); -try { -assertThat(client2, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet(TASK_0_1))); -assertThat(client3, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); -} catch (final AssertionError ignored) { -assertThat(client2, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); -assertThat(client3, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet(TASK_0_2))); -} +assertThat(client2, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet(TASK_0_1))); +assertThat(client3, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); } @Test
[kafka] branch trunk updated: HOTFIX: fix PriorityQueue iteration to assign warmups in priority order (#12585)
This is an automated email from the ASF dual-hosted git repository. showuon pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/kafka.git The following commit(s) were added to refs/heads/trunk by this push: new 7ec10ce19a HOTFIX: fix PriorityQueue iteration to assign warmups in priority order (#12585) 7ec10ce19a is described below commit 7ec10ce19a1c5869a4713f9c9b6b12da5c63e42b Author: A. Sophie Blee-Goldman AuthorDate: Fri Sep 2 03:14:34 2022 -0700 HOTFIX: fix PriorityQueue iteration to assign warmups in priority order (#12585) Based on a patch submitted to the confluentinc fork & then abandoned. Needed some updates and minor expansion but more or less just re-applied the changes proposed in confluentinc#697. Original PR has a very detailed justification for these changes but the tl;dr of it is that apparently the PriorityQueue's iterator does not actually guarantee to return elements in priority order. Reviewer: Luke Chen --- .../streams/processor/internals/assignment/TaskMovement.java | 6 -- .../processor/internals/assignment/TaskMovementTest.java | 11 +++ 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java index 38e64276ba..ec0fa5e11e 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovement.java @@ -107,7 +107,8 @@ final class TaskMovement { final int movementsNeeded = taskMovements.size(); -for (final TaskMovement movement : taskMovements) { +while (!taskMovements.isEmpty()) { +final TaskMovement movement = taskMovements.poll(); // Attempt to find a caught up standby, otherwise find any caught up client, failing that use the most // caught up client. 
final boolean moved = tryToSwapStandbyAndActiveOnCaughtUpClient(clientStates, caughtUpClientsByTaskLoad, movement) || @@ -157,7 +158,8 @@ final class TaskMovement { int movementsNeeded = 0; -for (final TaskMovement movement : taskMovements) { +while (!taskMovements.isEmpty()) { +final TaskMovement movement = taskMovements.poll(); final Function eligibleClientPredicate = clientId -> !clientStates.get(clientId).hasAssignedTask(movement.task); UUID sourceClient = caughtUpClientsByTaskLoad.poll( diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java index baf6d18496..a337deb801 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskMovementTest.java @@ -244,15 +244,10 @@ public class TaskMovementTest { assertThat(client2, hasProperty("activeTasks", ClientState::activeTasks, mkSet(TASK_0_2))); assertThat(client3, hasProperty("activeTasks", ClientState::activeTasks, mkSet(TASK_0_1))); -// we should only assign one warmup, but it could be either one that needs to be migrated. 
+// we should only assign one warmup, and the task movement should have the highest priority assertThat(client1, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); -try { -assertThat(client2, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet(TASK_0_1))); -assertThat(client3, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); -} catch (final AssertionError ignored) { -assertThat(client2, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); -assertThat(client3, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet(TASK_0_2))); -} +assertThat(client2, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet(TASK_0_1))); +assertThat(client3, hasProperty("standbyTasks", ClientState::standbyTasks, mkSet())); } @Test
[kafka] branch 2.8 updated: MINOR: Update NOTICE file
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a commit to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git The following commit(s) were added to refs/heads/2.8 by this push: new 3146c6ff4a MINOR: Update NOTICE file 3146c6ff4a is described below commit 3146c6ff4a24cc24d6039037e99edc8930d1958a Author: Manikumar Reddy AuthorDate: Fri Sep 2 14:58:59 2022 +0530 MINOR: Update NOTICE file --- NOTICE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NOTICE b/NOTICE index 674c942b12..677b5251d9 100644 --- a/NOTICE +++ b/NOTICE @@ -1,5 +1,5 @@ Apache Kafka -Copyright 2021 The Apache Software Foundation. +Copyright 2022 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/).
[kafka] 05/06: MINOR: Bump version in upgrade guide to 2.8.2
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a commit to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git commit f6583c9d63379e18702bab29bff622af30d80b57 Author: Manikumar Reddy AuthorDate: Fri Sep 2 14:08:55 2022 +0530 MINOR: Bump version in upgrade guide to 2.8.2 --- docs/upgrade.html | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/upgrade.html b/docs/upgrade.html index ecebba9ec2..0d5094657d 100644 --- a/docs/upgrade.html +++ b/docs/upgrade.html @@ -19,7 +19,7 @@
[kafka] 02/06: MINOR: Add configurable max receive size for SASL authentication requests
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a commit to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git commit b7dd40ff2bfb4e0cd726c1168e039d828daf113d Author: Manikumar Reddy AuthorDate: Mon May 16 19:25:02 2022 +0530 MINOR: Add configurable max receive size for SASL authentication requests This adds a new configuration `sasl.server.max.receive.size` that sets the maximum receive size for requests before and during authentication. Reviewers: Tom Bentley , Mickael Maison Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 2 + .../config/internals/BrokerSecurityConfigs.java| 6 +++ .../authenticator/SaslServerAuthenticator.java | 16 ++-- .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 ++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +-- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 ++ .../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + 8 files changed, 77 insertions(+), 7 deletions(-) diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index 8f91d98738..8324e763b8 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -32,6 +32,8 @@ files="(Fetcher|Sender|SenderTest|ConsumerCoordinator|KafkaConsumer|KafkaProducer|Utils|TransactionManager|TransactionManagerTest|KafkaAdminClient|NetworkClient|Admin|KafkaRaftClient|KafkaRaftClientTest|RaftClientTestContext).java"/> + { - invocation.getArgument(0).putInt(SaslServerAuthenticator.MAX_RECEIVE_SIZE + 1); + invocation.getArgument(0).putInt(BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE + 1); return 4; }); -assertThrows(InvalidReceiveException.class, authenticator::authenticate); +assertThrows(SaslAuthenticationException.class, authenticator::authenticate); verify(transportLayer).read(any(ByteBuffer.class)); } diff --git a/core/src/main/scala/kafka/server/KafkaConfig.scala 
b/core/src/main/scala/kafka/server/KafkaConfig.scala index f1158bba4e..8d5d12a1e9 100755 --- a/core/src/main/scala/kafka/server/KafkaConfig.scala +++ b/core/src/main/scala/kafka/server/KafkaConfig.scala @@ -249,6 +249,7 @@ object Defaults { /** * General Security configuration ***/ val ConnectionsMaxReauthMsDefault = 0L + val DefaultServerMaxMaxReceiveSize = BrokerSecurityConfigs.DEFAULT_SASL_SERVER_MAX_RECEIVE_SIZE /** * Sasl configuration ***/ val SaslMechanismInterBrokerProtocol = SaslConfigs.DEFAULT_SASL_MECHANISM @@ -539,6 +540,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassProp = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_CONFIG val ConnectionsMaxReauthMsProp = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS + val SaslServerMaxReceiveSizeProp = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_CONFIG val securityProviderClassProp = SecurityConfig.SECURITY_PROVIDERS_CONFIG /** * SSL Configuration / @@ -963,6 +965,7 @@ object KafkaConfig { /** Common Security Configuration */ val PrincipalBuilderClassDoc = BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_DOC val ConnectionsMaxReauthMsDoc = BrokerSecurityConfigs.CONNECTIONS_MAX_REAUTH_MS_DOC + val SaslServerMaxReceiveSizeDoc = BrokerSecurityConfigs.SASL_SERVER_MAX_RECEIVE_SIZE_DOC val securityProviderClassDoc = SecurityConfig.SECURITY_PROVIDERS_DOC /** * SSL Configuration / @@ -1247,6 +1250,7 @@ object KafkaConfig { /** * General Security Configuration / .define(ConnectionsMaxReauthMsProp, LONG, Defaults.ConnectionsMaxReauthMsDefault, MEDIUM, ConnectionsMaxReauthMsDoc) + .define(SaslServerMaxReceiveSizeProp, INT, Defaults.DefaultServerMaxMaxReceiveSize, MEDIUM, SaslServerMaxReceiveSizeDoc) .define(securityProviderClassProp, STRING, null, LOW, securityProviderClassDoc) /** * SSL Configuration / diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala index 1f803b6476..014a123651 100755 --- 
a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala @@ -820,6 +820,8 @@ class KafkaConfigTest { case KafkaConfig.KafkaMetricsReporterClassesProp => // ignore case KafkaConfig.KafkaMetricsPollingIntervalSecondsProp => //ignore +case KafkaConfig.SaslServerMaxReceiveSizeProp => assertPropertyInvalid(baseProperties, name, "not_a_number") + // Raft Quorum Configs case
[kafka] 06/06: MINOR: Update version to 2.8.2
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a commit to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 00acd559a8edac568a107933e07c6c3da71e17a4 Author: Manikumar Reddy AuthorDate: Fri Sep 2 14:12:42 2022 +0530 MINOR: Update version to 2.8.2 --- docs/js/templateData.js| 2 +- gradle.properties | 2 +- streams/quickstart/java/pom.xml| 2 +- streams/quickstart/java/src/main/resources/archetype-resources/pom.xml | 2 +- streams/quickstart/pom.xml | 2 +- tests/kafkatest/__init__.py| 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/js/templateData.js b/docs/js/templateData.js index 2347ee4e65..4e10a60c5f 100644 --- a/docs/js/templateData.js +++ b/docs/js/templateData.js @@ -19,6 +19,6 @@ limitations under the License. var context={ "version": "28", "dotVersion": "2.8", -"fullDotVersion": "2.8.2-SNAPSHOT", +"fullDotVersion": "2.8.2", "scalaVersion": "2.13" }; diff --git a/gradle.properties b/gradle.properties index bb3da5abe7..b107407905 100644 --- a/gradle.properties +++ b/gradle.properties @@ -20,7 +20,7 @@ group=org.apache.kafka # - tests/kafkatest/__init__.py # - tests/kafkatest/version.py (variable DEV_VERSION) # - kafka-merge-pr.py -version=2.8.2-SNAPSHOT +version=2.8.2 scalaVersion=2.13.5 task=build org.gradle.jvmargs=-Xmx2g -Xss4m -XX:+UseParallelGC diff --git a/streams/quickstart/java/pom.xml b/streams/quickstart/java/pom.xml index 6e1576e990..d59ba32757 100644 --- a/streams/quickstart/java/pom.xml +++ b/streams/quickstart/java/pom.xml @@ -26,7 +26,7 @@ org.apache.kafka streams-quickstart -2.8.2-SNAPSHOT +2.8.2 .. 
diff --git a/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml b/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml index 6f00851f6d..3b289fa797 100644 --- a/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml +++ b/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml @@ -29,7 +29,7 @@ UTF-8 -2.8.2-SNAPSHOT +2.8.2 1.7.7 1.2.17 diff --git a/streams/quickstart/pom.xml b/streams/quickstart/pom.xml index c8049ad5e2..b29f2ad9ea 100644 --- a/streams/quickstart/pom.xml +++ b/streams/quickstart/pom.xml @@ -22,7 +22,7 @@ org.apache.kafka streams-quickstart pom -2.8.2-SNAPSHOT +2.8.2 Kafka Streams :: Quickstart diff --git a/tests/kafkatest/__init__.py b/tests/kafkatest/__init__.py index cbb6048154..c769c3702b 100644 --- a/tests/kafkatest/__init__.py +++ b/tests/kafkatest/__init__.py @@ -22,4 +22,4 @@ # Instead, in development branches, the version should have a suffix of the form ".devN" # # For example, when Kafka is at version 1.0.0-SNAPSHOT, this should be something like "1.0.0.dev0" -__version__ = '2.8.2.dev0' +__version__ = '2.8.2'
[kafka] 03/06: MINOR: Disable kraft system tests in 2.8 branch
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a commit to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 301a0d3f6434e7f9ec0a20ad0669eea01f936e40 Author: Manikumar Reddy AuthorDate: Thu Sep 1 23:26:03 2022 +0530 MINOR: Disable kraft system tests in 2.8 branch --- tests/kafkatest/sanity_checks/test_bounce.py | 4 -- .../sanity_checks/test_console_consumer.py | 5 +- .../sanity_checks/test_verifiable_producer.py | 74 -- tests/kafkatest/services/kafka/quorum.py | 6 +- tests/kafkatest/tests/core/security_test.py| 2 +- 5 files changed, 5 insertions(+), 86 deletions(-) diff --git a/tests/kafkatest/sanity_checks/test_bounce.py b/tests/kafkatest/sanity_checks/test_bounce.py index c01f23b0cb..c954fba17c 100644 --- a/tests/kafkatest/sanity_checks/test_bounce.py +++ b/tests/kafkatest/sanity_checks/test_bounce.py @@ -44,10 +44,6 @@ class TestBounce(Test): if self.zk: self.zk.start() -@cluster(num_nodes=6) -@parametrize(metadata_quorum=quorum.remote_raft) -@cluster(num_nodes=4) -@parametrize(metadata_quorum=quorum.colocated_raft) @cluster(num_nodes=4) @parametrize(metadata_quorum=quorum.zk) def test_simple_run(self, metadata_quorum): diff --git a/tests/kafkatest/sanity_checks/test_console_consumer.py b/tests/kafkatest/sanity_checks/test_console_consumer.py index 0847ce0cb4..2448b75e03 100644 --- a/tests/kafkatest/sanity_checks/test_console_consumer.py +++ b/tests/kafkatest/sanity_checks/test_console_consumer.py @@ -43,12 +43,9 @@ class ConsoleConsumerTest(Test): if self.zk: self.zk.start() -@cluster(num_nodes=3) -@matrix(security_protocol=['PLAINTEXT', 'SSL'], metadata_quorum=quorum.all_raft) + @cluster(num_nodes=4) -@matrix(security_protocol=['SASL_SSL'], sasl_mechanism=['PLAIN'], metadata_quorum=quorum.all_raft) @matrix(security_protocol=['SASL_SSL'], sasl_mechanism=['SCRAM-SHA-256', 'SCRAM-SHA-512']) # SCRAM not yet supported with Raft -@matrix(security_protocol=['SASL_PLAINTEXT', 'SASL_SSL'], 
metadata_quorum=quorum.all_raft) def test_lifecycle(self, security_protocol, sasl_mechanism='GSSAPI', metadata_quorum=quorum.zk): """Check that console consumer starts/stops properly, and that we are capturing log output.""" diff --git a/tests/kafkatest/sanity_checks/test_verifiable_producer.py b/tests/kafkatest/sanity_checks/test_verifiable_producer.py index 7fcb603d59..fb21c08da5 100644 --- a/tests/kafkatest/sanity_checks/test_verifiable_producer.py +++ b/tests/kafkatest/sanity_checks/test_verifiable_producer.py @@ -96,77 +96,3 @@ class TestVerifiableProducer(Test): num_produced = self.producer.num_acked assert num_produced == self.num_messages, "num_produced: %d, num_messages: %d" % (num_produced, self.num_messages) -@cluster(num_nodes=4) -@matrix(inter_broker_security_protocol=['PLAINTEXT', 'SSL'], metadata_quorum=[quorum.remote_raft]) -@matrix(inter_broker_security_protocol=['SASL_SSL'], inter_broker_sasl_mechanism=['PLAIN', 'GSSAPI'], -metadata_quorum=[quorum.remote_raft]) -def test_multiple_raft_security_protocols( -self, inter_broker_security_protocol, inter_broker_sasl_mechanism='GSSAPI', metadata_quorum=quorum.remote_raft): -""" -Test for remote Raft cases that we can start VerifiableProducer on the current branch snapshot version, and -verify that we can produce a small number of messages. The inter-controller and broker-to-controller -security protocols are defined to be different (which differs from the above test, where they were the same). 
-""" -self.kafka.security_protocol = self.kafka.interbroker_security_protocol = inter_broker_security_protocol -self.kafka.client_sasl_mechanism = self.kafka.interbroker_sasl_mechanism = inter_broker_sasl_mechanism -controller_quorum = self.kafka.controller_quorum -sasl_mechanism = 'PLAIN' if inter_broker_sasl_mechanism == 'GSSAPI' else 'GSSAPI' -if inter_broker_security_protocol == 'PLAINTEXT': -controller_security_protocol = 'SSL' -intercontroller_security_protocol = 'SASL_SSL' -elif inter_broker_security_protocol == 'SSL': -controller_security_protocol = 'SASL_SSL' -intercontroller_security_protocol = 'PLAINTEXT' -else: # inter_broker_security_protocol == 'SASL_SSL' -controller_security_protocol = 'PLAINTEXT' -intercontroller_security_protocol = 'SSL' -controller_quorum.controller_security_protocol = controller_security_protocol -controller_quorum.controller_sasl_mechanism = sasl_mechanism -controller_quorum.intercontroller_security_protocol = intercontroller_security_protocol -controller_quorum.intercontroller_sasl_mechanism =
[kafka] 01/06: MINOR: Add more validation during KRPC deserialization
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a commit to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 14951a83e3fdead212156e5532359500d72f68bc Author: Colin Patrick McCabe AuthorDate: Fri May 20 15:23:12 2022 -0700 MINOR: Add more validation during KRPC deserialization When deserializing KRPC (which is used for RPCs sent to Kafka, Kafka Metadata records, and some other things), check that we have at least N bytes remaining before allocating an array of size N. Remove DataInputStreamReadable since it was hard to make this class aware of how many bytes were remaining. Instead, when reading an individual record in the Raft layer, simply create a ByteBufferAccessor with a ByteBuffer containing just the bytes we're interested in. Add SimpleArraysMessageTest and ByteBufferAccessorTest. Also add some additional tests in RequestResponseTest. Reviewers: Tom Bentley , Mickael Maison , Colin McCabe Co-authored-by: Colin McCabe Co-authored-by: Manikumar Reddy Co-authored-by: Mickael Maison --- checkstyle/suppressions.xml| 4 + .../kafka/common/protocol/ByteBufferAccessor.java | 14 +++- .../common/protocol/DataInputStreamReadable.java | 18 - .../org/apache/kafka/common/protocol/Readable.java | 9 +-- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../common/message/SimpleArraysMessageTest.java| 54 + .../common/protocol/ByteBufferAccessorTest.java| 58 ++ .../kafka/common/record/DefaultRecordTest.java | 14 .../kafka/common/requests/RequestContextTest.java | 83 +++ .../kafka/common/requests/RequestResponseTest.java | 93 ++ .../common/message/SimpleArraysMessage.json| 29 +++ .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- .../apache/kafka/message/MessageDataGenerator.java | 9 ++- .../apache/kafka/raft/internals/StringSerde.java | 3 +- 15 files changed, 378 insertions(+), 24 deletions(-) diff --git a/checkstyle/suppressions.xml 
b/checkstyle/suppressions.xml index 8163f78f85..8f91d98738 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -153,6 +153,10 @@ + + + diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java index 3c5c309731..712973e369 100644 --- a/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java +++ b/clients/src/main/java/org/apache/kafka/common/protocol/ByteBufferAccessor.java @@ -54,8 +54,15 @@ public class ByteBufferAccessor implements Readable, Writable { } @Override -public void readArray(byte[] arr) { +public byte[] readArray(int size) { +int remaining = buf.remaining(); +if (size > remaining) { +throw new RuntimeException("Error reading byte array of " + size + " byte(s): only " + remaining + + " byte(s) available"); +} +byte[] arr = new byte[size]; buf.get(arr); +return arr; } @Override @@ -133,6 +140,11 @@ public class ByteBufferAccessor implements Readable, Writable { return ByteUtils.readVarlong(buf); } +@Override +public int remaining() { +return buf.remaining(); +} + public void flip() { buf.flip(); } diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java b/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java index 93c6c597d7..3f0b96757e 100644 --- a/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java +++ b/clients/src/main/java/org/apache/kafka/common/protocol/DataInputStreamReadable.java @@ -76,10 +76,12 @@ public class DataInputStreamReadable implements Readable, Closeable { } @Override -public void readArray(byte[] arr) { +public byte[] readArray(final int length) { try { +byte[] arr = new byte[length]; input.readFully(arr); -} catch (IOException e) { +return arr; +} catch (IOException e) { throw new RuntimeException(e); } } @@ -95,8 +97,7 @@ public class DataInputStreamReadable implements 
Readable, Closeable { @Override public ByteBuffer readByteBuffer(int length) { -byte[] arr = new byte[length]; -readArray(arr); +byte[] arr = readArray(length); return ByteBuffer.wrap(arr); } @@ -118,6 +119,15 @@ public class DataInputStreamReadable implements Readable, Closeable { } } +@Override +public int remaining() { +try { +return input.available(); +
[kafka] 04/06: MINOR: Update LICENSE for 2.8.2
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a commit to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git commit 7ef72abb4d5ea65405d6115a328cf2c704de2c2d Author: Manikumar Reddy AuthorDate: Fri Sep 2 14:01:42 2022 +0530 MINOR: Update LICENSE for 2.8.2 --- LICENSE-binary | 37 +++-- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index d58b01716a..7d885849c6 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -218,32 +218,33 @@ jackson-jaxrs-json-provider-2.10.5 jackson-module-jaxb-annotations-2.10.5 jackson-module-paranamer-2.10.5 jackson-module-scala_2.13-2.10.5 +jackson-module-scala_2.12-2.10.5 jakarta.validation-api-2.0.2 javassist-3.27.0-GA -jetty-client-9.4.43.v20210629 -jetty-continuation-9.4.43.v20210629 -jetty-http-9.4.43.v20210629 -jetty-io-9.4.43.v20210629 -jetty-security-9.4.43.v20210629 -jetty-server-9.4.43.v20210629 -jetty-servlet-9.4.43.v20210629 -jetty-servlets-9.4.43.v20210629 -jetty-util-9.4.43.v20210629 -jetty-util-ajax-9.4.43.v20210629 +jetty-client-9.4.48.v20220622 +jetty-continuation-9.4.48.v20220622 +jetty-http-9.4.48.v20220622 +jetty-io-9.4.48.v20220622 +jetty-security-9.4.48.v20220622 +jetty-server-9.4.48.v20220622 +jetty-servlet-9.4.48.v20220622 +jetty-servlets-9.4.48.v20220622 +jetty-util-9.4.48.v20220622 +jetty-util-ajax-9.4.48.v20220622 jersey-common-2.34 jersey-server-2.34 log4j-1.2.17 lz4-java-1.7.1 maven-artifact-3.8.1 metrics-core-2.2.0 -netty-buffer-4.1.62.Final -netty-codec-4.1.62.Final -netty-common-4.1.62.Final -netty-handler-4.1.62.Final -netty-resolver-4.1.62.Final -netty-transport-4.1.62.Final -netty-transport-native-epoll-4.1.62.Final -netty-transport-native-unix-common-4.1.62.Final +netty-buffer-4.1.73.Final +netty-codec-4.1.73.Final +netty-common-4.1.73.Final +netty-handler-4.1.73.Final +netty-resolver-4.1.73.Final +netty-transport-4.1.73.Final +netty-transport-native-epoll-4.1.73.Final 
+netty-transport-native-unix-common-4.1.73.Final plexus-utils-3.2.1 rocksdbjni-5.18.4 scala-collection-compat_2.13-2.3.0
[kafka] branch 2.8 updated (cd032154a7 -> 00acd559a8)
This is an automated email from the ASF dual-hosted git repository. manikumar pushed a change to branch 2.8 in repository https://gitbox.apache.org/repos/asf/kafka.git from cd032154a7 KAFKA-14107: Upgrade Jetty version for CVE fixes (#12440) new 14951a83e3 MINOR: Add more validation during KRPC deserialization new b7dd40ff2b MINOR: Add configurable max receive size for SASL authentication requests new 301a0d3f64 MINOR: Disable kraft system tests in 2.8 branch new 7ef72abb4d MINOR: Update LICENSE for 2.8.2 new f6583c9d63 MINOR: Bump version in upgrade guide to 2.8.2 new 00acd559a8 MINOR: Update version to 2.8.2 The 6 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: LICENSE-binary | 37 - checkstyle/suppressions.xml| 6 ++ .../config/internals/BrokerSecurityConfigs.java| 6 ++ .../kafka/common/protocol/ByteBufferAccessor.java | 14 +++- .../common/protocol/DataInputStreamReadable.java | 18 - .../org/apache/kafka/common/protocol/Readable.java | 9 +-- .../apache/kafka/common/record/DefaultRecord.java | 2 + .../authenticator/SaslServerAuthenticator.java | 16 +++- .../common/message/SimpleArraysMessageTest.java| 54 + .../common/protocol/ByteBufferAccessorTest.java| 58 ++ .../kafka/common/record/DefaultRecordTest.java | 14 .../kafka/common/requests/RequestContextTest.java | 83 +++ .../kafka/common/requests/RequestResponseTest.java | 93 ++ .../kafka/common/security/TestSecurityConfig.java | 2 + .../authenticator/SaslAuthenticatorTest.java | 46 +++ .../authenticator/SaslServerAuthenticatorTest.java | 6 +- ...ecordsMessage.json => SimpleArraysMessage.json} | 15 ++-- core/src/main/scala/kafka/server/KafkaConfig.scala | 4 + .../main/scala/kafka/tools/TestRaftServer.scala| 6 +- .../scala/kafka/raft/KafkaMetadataLogTest.scala| 6 +- .../scala/unit/kafka/server/KafkaConfigTest.scala | 2 + 
docs/js/templateData.js| 2 +- docs/upgrade.html | 4 +- .../apache/kafka/message/MessageDataGenerator.java | 9 ++- gradle.properties | 2 +- .../apache/kafka/raft/internals/StringSerde.java | 3 +- streams/quickstart/java/pom.xml| 2 +- .../src/main/resources/archetype-resources/pom.xml | 2 +- streams/quickstart/pom.xml | 2 +- tests/kafkatest/__init__.py| 2 +- tests/kafkatest/sanity_checks/test_bounce.py | 4 - .../sanity_checks/test_console_consumer.py | 5 +- .../sanity_checks/test_verifiable_producer.py | 74 - tests/kafkatest/services/kafka/quorum.py | 6 +- tests/kafkatest/tests/core/security_test.py| 2 +- 35 files changed, 468 insertions(+), 148 deletions(-) create mode 100644 clients/src/test/java/org/apache/kafka/common/message/SimpleArraysMessageTest.java create mode 100644 clients/src/test/java/org/apache/kafka/common/protocol/ByteBufferAccessorTest.java copy clients/src/test/resources/common/message/{SimpleRecordsMessage.json => SimpleArraysMessage.json} (71%)