svn commit: r56038 - in /release/kafka: ./ 3.2.1/

2022-07-29 Thread cmccabe
Author: cmccabe
Date: Fri Jul 29 23:19:36 2022
New Revision: 56038

Log:
Release 3.2.1

Added:
release/kafka/3.2.1/
release/kafka/3.2.1/RELEASE_NOTES.html
release/kafka/3.2.1/RELEASE_NOTES.html.asc
release/kafka/3.2.1/RELEASE_NOTES.html.md5
release/kafka/3.2.1/RELEASE_NOTES.html.sha1
release/kafka/3.2.1/RELEASE_NOTES.html.sha512
release/kafka/3.2.1/kafka-3.2.1-src.tgz   (with props)
release/kafka/3.2.1/kafka-3.2.1-src.tgz.asc
release/kafka/3.2.1/kafka-3.2.1-src.tgz.md5
release/kafka/3.2.1/kafka-3.2.1-src.tgz.sha1
release/kafka/3.2.1/kafka-3.2.1-src.tgz.sha512
release/kafka/3.2.1/kafka_2.12-3.2.1-site-docs.tgz   (with props)
release/kafka/3.2.1/kafka_2.12-3.2.1-site-docs.tgz.asc
release/kafka/3.2.1/kafka_2.12-3.2.1-site-docs.tgz.md5
release/kafka/3.2.1/kafka_2.12-3.2.1-site-docs.tgz.sha1
release/kafka/3.2.1/kafka_2.12-3.2.1-site-docs.tgz.sha512
release/kafka/3.2.1/kafka_2.12-3.2.1.tgz   (with props)
release/kafka/3.2.1/kafka_2.12-3.2.1.tgz.asc
release/kafka/3.2.1/kafka_2.12-3.2.1.tgz.md5
release/kafka/3.2.1/kafka_2.12-3.2.1.tgz.sha1
release/kafka/3.2.1/kafka_2.12-3.2.1.tgz.sha512
release/kafka/3.2.1/kafka_2.13-3.2.1-site-docs.tgz   (with props)
release/kafka/3.2.1/kafka_2.13-3.2.1-site-docs.tgz.asc
release/kafka/3.2.1/kafka_2.13-3.2.1-site-docs.tgz.md5
release/kafka/3.2.1/kafka_2.13-3.2.1-site-docs.tgz.sha1
release/kafka/3.2.1/kafka_2.13-3.2.1-site-docs.tgz.sha512
release/kafka/3.2.1/kafka_2.13-3.2.1.tgz   (with props)
release/kafka/3.2.1/kafka_2.13-3.2.1.tgz.asc
release/kafka/3.2.1/kafka_2.13-3.2.1.tgz.md5
release/kafka/3.2.1/kafka_2.13-3.2.1.tgz.sha1
release/kafka/3.2.1/kafka_2.13-3.2.1.tgz.sha512
Modified:
release/kafka/KEYS

Added: release/kafka/3.2.1/RELEASE_NOTES.html
==
--- release/kafka/3.2.1/RELEASE_NOTES.html (added)
+++ release/kafka/3.2.1/RELEASE_NOTES.html Fri Jul 29 23:19:36 2022
@@ -0,0 +1,31 @@
+Release Notes - Kafka - Version 3.2.1
+Below is a summary of the JIRA issues addressed in the 3.2.1 release of 
Kafka. For full documentation of the
+release, a guide to get started, and information about the project, see
the <a href="https://kafka.apache.org/">Kafka</a>
+project site.
+
+Note about upgrades: Please carefully review the
+<a href="https://kafka.apache.org/32/documentation.html#upgrade">upgrade
documentation</a> for this release thoroughly
+before upgrading your cluster. The upgrade notes discuss any critical 
information about incompatibilities and breaking
+changes, performance changes, and any other changes that might impact your 
production deployment of Kafka.
+
+The documentation for the most recent release can be found at
+<a href="https://kafka.apache.org/documentation.html">https://kafka.apache.org/documentation.html</a>.
+Improvement
+
+[<a href="https://issues.apache.org/jira/browse/KAFKA-14013">KAFKA-14013</a>] - Limit the length of the `reason` field sent on the wire
+
+Bug
+
+[<a href="https://issues.apache.org/jira/browse/KAFKA-13474">KAFKA-13474</a>] - Regression in dynamic update of broker certificate
+[<a href="https://issues.apache.org/jira/browse/KAFKA-13572">KAFKA-13572</a>] - Negative value for 'Preferred Replica Imbalance' metric
+[<a href="https://issues.apache.org/jira/browse/KAFKA-13773">KAFKA-13773</a>] - Data loss after recovery from crash due to full hard disk
+[<a href="https://issues.apache.org/jira/browse/KAFKA-13861">KAFKA-13861</a>] - validateOnly request field does not work for CreatePartition requests in Kraft mode.
+[<a href="https://issues.apache.org/jira/browse/KAFKA-13899">KAFKA-13899</a>] - Inconsistent error codes returned from AlterConfig APIs
+[<a href="https://issues.apache.org/jira/browse/KAFKA-13998">KAFKA-13998</a>] - JoinGroupRequestData 'reason' can be too large
+[<a href="https://issues.apache.org/jira/browse/KAFKA-14010">KAFKA-14010</a>] - alterISR request won't retry when receiving retriable error
+[<a href="https://issues.apache.org/jira/browse/KAFKA-14024">KAFKA-14024</a>] - Consumer stuck during cooperative rebalance for Commit offset in onJoinPrepare
+[<a href="https://issues.apache.org/jira/browse/KAFKA-14035">KAFKA-14035</a>] - QuorumController handleRenounce throws NPE
+[<a href="https://issues.apache.org/jira/browse/KAFKA-14055">KAFKA-14055</a>] - Transaction markers may be lost during cleaning if data keys conflict with marker keys
+[<a href="https://issues.apache.org/jira/browse/KAFKA-14062">KAFKA-14062</a>] - OAuth client token refresh fails with SASL extensions
+[<a href="https://issues.apache.org/jira/browse/KAFKA-14079">KAFKA-14079</a>] - Source task will not commit offsets and develops memory leak if "error.tolerance" is set to "all"
+

Added: release/kafka/3.2.1/RELEASE_NOTES.html.asc
==
--- release/kafka/3.2.1/RELEASE_NOTES.html.asc (added)
+++ release/kafka/3.2.1/RELEASE_NOTES.html.asc Fri Jul 29 23:19:36 2022
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+

[kafka] branch trunk updated: Minor: enable index for emit final sliding window (#12461)

2022-07-29 Thread guozhang
This is an automated email from the ASF dual-hosted git repository.

guozhang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
 new f7ac5d3d00 Minor: enable index for emit final sliding window (#12461)
f7ac5d3d00 is described below

commit f7ac5d3d00f3cd3caa25c3003900bdb245d5252e
Author: Hao Li <1127478+lihao...@users.noreply.github.com>
AuthorDate: Fri Jul 29 14:47:25 2022 -0700

Minor: enable index for emit final sliding window (#12461)

Enable index for sliding window emit final case as it's faster to fetch 
windows for particular key

Reviewers: Guozhang Wang 
---
 .../kafka/streams/kstream/internals/SlidingWindowedKStreamImpl.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/SlidingWindowedKStreamImpl.java
 
b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/SlidingWindowedKStreamImpl.java
index 5ca6b911b7..587d2d5a87 100644
--- 
a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/SlidingWindowedKStreamImpl.java
+++ 
b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/SlidingWindowedKStreamImpl.java
@@ -233,7 +233,7 @@ public class SlidingWindowedKStreamImpl extends 
AbstractStream imple
 Duration.ofMillis(retentionPeriod),
 Duration.ofMillis(windows.timeDifferenceMs()),
 false,
-false
+true
 ) :
 Stores.persistentTimestampedWindowStore(
 materialized.storeName(),



[kafka] branch trunk updated: MINOR: convert some more junit tests to support KRaft (#12456)

2022-07-29 Thread cmccabe
This is an automated email from the ASF dual-hosted git repository.

cmccabe pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
 new e26772ef61 MINOR: convert some more junit tests to support KRaft 
(#12456)
e26772ef61 is described below

commit e26772ef616d1095efb7e48baa44842df8aeb058
Author: Colin Patrick McCabe 
AuthorDate: Fri Jul 29 13:36:40 2022 -0700

MINOR: convert some more junit tests to support KRaft (#12456)

* MINOR: convert some more junit tests to support KRaft

Introduce TestUtils#waitUntilLeaderIsElectedOrChangedWithAdmin, a ZK-free 
alternative to
TestUtils#waitUntilLeaderIsElectedOrChanged.

Convert PlaintextProducerSendTest, SslProducerSendTest, 
TransactionsWithMaxInFlightOneTest,
AddPartitionsToTxnRequestServerTest and KafkaMetricsReporterTest to support 
KRaft

Reviewers: dengziming , David Arthur 

---
 .../kafka/api/BaseProducerSendTest.scala   | 102 -
 .../kafka/api/PlaintextProducerSendTest.scala  |  51 ++-
 .../api/TransactionsWithMaxInFlightOneTest.scala   |  24 ++---
 .../kafka/server/QuorumTestHarness.scala   |  40 +---
 .../AddPartitionsToTxnRequestServerTest.scala  |  16 ++--
 .../kafka/server/KafkaMetricsReporterTest.scala|  55 ++-
 .../test/scala/unit/kafka/utils/TestUtils.scala|  60 +++-
 7 files changed, 226 insertions(+), 122 deletions(-)

diff --git 
a/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala 
b/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala
index 61870b073d..ce3cd32afd 100644
--- a/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala
+++ b/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala
@@ -19,22 +19,24 @@ package kafka.api
 
 import java.time.Duration
 import java.nio.charset.StandardCharsets
-import java.util.Properties
+import java.util.{Collections, Properties}
 import java.util.concurrent.TimeUnit
-
 import kafka.integration.KafkaServerTestHarness
 import kafka.log.LogConfig
 import kafka.server.KafkaConfig
-import kafka.utils.TestUtils
+import kafka.utils.{TestInfoUtils, TestUtils}
+import org.apache.kafka.clients.admin.{Admin, NewPartitions}
 import org.apache.kafka.clients.consumer.KafkaConsumer
 import org.apache.kafka.clients.producer._
 import org.apache.kafka.common.errors.TimeoutException
-import org.apache.kafka.common.network.ListenerName
+import org.apache.kafka.common.network.{ListenerName, Mode}
 import org.apache.kafka.common.record.TimestampType
 import org.apache.kafka.common.security.auth.SecurityProtocol
 import org.apache.kafka.common.{KafkaException, TopicPartition}
 import org.junit.jupiter.api.Assertions._
-import org.junit.jupiter.api.{AfterEach, BeforeEach, Test, TestInfo}
+import org.junit.jupiter.api.{AfterEach, BeforeEach, TestInfo}
+import org.junit.jupiter.params.ParameterizedTest
+import org.junit.jupiter.params.provider.ValueSource
 
 import scala.jdk.CollectionConverters._
 import scala.collection.mutable.Buffer
@@ -42,16 +44,17 @@ import scala.concurrent.ExecutionException
 
 abstract class BaseProducerSendTest extends KafkaServerTestHarness {
 
-  def generateConfigs = {
+  def generateConfigs: scala.collection.Seq[KafkaConfig] = {
 val overridingProps = new Properties()
 val numServers = 2
 overridingProps.put(KafkaConfig.NumPartitionsProp, 4.toString)
-TestUtils.createBrokerConfigs(numServers, zkConnect, false, 
interBrokerSecurityProtocol = Some(securityProtocol),
+TestUtils.createBrokerConfigs(numServers, zkConnectOrNull, false, 
interBrokerSecurityProtocol = Some(securityProtocol),
   trustStoreFile = trustStoreFile, saslProperties = 
serverSaslProperties).map(KafkaConfig.fromProps(_, overridingProps))
   }
 
   private var consumer: KafkaConsumer[Array[Byte], Array[Byte]] = _
   private val producers = Buffer[KafkaProducer[Array[Byte], Array[Byte]]]()
+  protected var admin: Admin = null
 
   protected val topic = "topic"
   private val numRecords = 100
@@ -59,6 +62,15 @@ abstract class BaseProducerSendTest extends 
KafkaServerTestHarness {
   @BeforeEach
   override def setUp(testInfo: TestInfo): Unit = {
 super.setUp(testInfo)
+
+admin = TestUtils.createAdminClient(brokers, listenerName,
+TestUtils.securityConfigs(Mode.CLIENT,
+  securityProtocol,
+  trustStoreFile,
+  "adminClient",
+  TestUtils.SslCertificateCn,
+  clientSaslProperties))
+
 consumer = TestUtils.createConsumer(
   bootstrapServers(listenerName = 
ListenerName.forSecurityProtocol(SecurityProtocol.PLAINTEXT)),
   securityProtocol = SecurityProtocol.PLAINTEXT
@@ -70,6 +82,7 @@ abstract class BaseProducerSendTest extends 
KafkaServerTestHarness {
 consumer.close()
 // Ensure that all producers are closed since unclosed producers impact 

[kafka-site] branch MINOR_add_clickable_links_load_iframe updated (c68fc989 -> 80f57ce8)

2022-07-29 Thread bbejeck
This is an automated email from the ASF dual-hosted git repository.

bbejeck pushed a change to branch MINOR_add_clickable_links_load_iframe
in repository https://gitbox.apache.org/repos/asf/kafka-site.git


from c68fc989 Add clickable images to load iframe videos
 add 80f57ce8 Add instruction to click image

No new revisions were added by this update.

Summary of changes:
 intro.html  | 5 -
 quickstart.html | 6 --
 2 files changed, 8 insertions(+), 3 deletions(-)



[kafka-site] 01/01: Add clickable images to load iframe videos

2022-07-29 Thread bbejeck
This is an automated email from the ASF dual-hosted git repository.

bbejeck pushed a commit to branch MINOR_add_clickable_links_load_iframe
in repository https://gitbox.apache.org/repos/asf/kafka-site.git

commit c68fc989c6781a1c18c124e1348f42cb3e57bb52
Author: Bill Bejeck 
AuthorDate: Fri Jul 29 13:46:02 2022 -0400

Add clickable images to load iframe videos
---
 images/what_is_kafka.png | Bin 0 -> 89371 bytes
 intro.html   |  19 ++-
 quickstart.html  |  21 -
 3 files changed, 38 insertions(+), 2 deletions(-)

diff --git a/images/what_is_kafka.png b/images/what_is_kafka.png
new file mode 100644
index ..381fa4ae
Binary files /dev/null and b/images/what_is_kafka.png differ
diff --git a/intro.html b/intro.html
index 8afe7f13..67b443b3 100644
--- a/intro.html
+++ b/intro.html
@@ -1,12 +1,29 @@
 
 
 
+
+  function loadVideo () {
+const videoPlaceholder = document.getElementById("video_placeholder"); 
+const iframe = document.createElement('iframe');
+iframe.class="youtube-embed page-header-video-embed";
+iframe.width="480";
+iframe.height="270";
+iframe.src="https://www.youtube.com/embed/FKgi3n-FyNU?modestbranding=1";;
+iframe.frameborder="0";
+iframe.allow="accelerometer; autoplay; encrypted-media; gyroscope; 
picture-in-picture";
+iframe.setAttribute('allowFullScreen', '');
+videoPlaceholder.parentNode.replaceChild(iframe, videoPlaceholder);
+  }
+
 
   
   

   Introduction
-  <a href="https://www.youtube.com/embed/FKgi3n-FyNU">Watch video: Everything you
need to know about Kafka in 10 minutes</a>
+  Everything you need to know about Kafka in 
10 minutes
+
+   
+
 
 
 
diff --git a/quickstart.html b/quickstart.html
index c04f1bdf..39559b3d 100644
--- a/quickstart.html
+++ b/quickstart.html
@@ -1,12 +1,31 @@
 
 
 
+
+  function loadVideo () {
+const videoPlaceholder = document.getElementById("video_placeholder"); 
+const iframe = document.createElement('iframe');
+iframe.class="youtube-embed page-header-video-embed";
+iframe.width="480";
+iframe.height="270";
+iframe.src="https://www.youtube.com/embed/FKgi3n-FyNU?modestbranding=1";;
+iframe.frameborder="0";
+iframe.allow="accelerometer; autoplay; encrypted-media; gyroscope; 
picture-in-picture";
+iframe.setAttribute('allowFullScreen', '');
+videoPlaceholder.parentNode.replaceChild(iframe, videoPlaceholder);
+  }
+
 
   
 
   Apache Kafka Quickstart
-  Interested in getting started with Kafka?  
Follow the instructions in this quickstart, or <a href="https://www.youtube.com/embed/FKgi3n-FyNU">watch the video</a>.
+  Everything you need to know about Kafka in 
10 minutes
+
+   
+
 
+
+
 
 
 



[kafka-site] branch MINOR_add_clickable_links_load_iframe created (now c68fc989)

2022-07-29 Thread bbejeck
This is an automated email from the ASF dual-hosted git repository.

bbejeck pushed a change to branch MINOR_add_clickable_links_load_iframe
in repository https://gitbox.apache.org/repos/asf/kafka-site.git


  at c68fc989 Add clickable images to load iframe videos

This branch includes the following new commits:

 new c68fc989 Add clickable images to load iframe videos

The 1 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.




[kafka] branch trunk updated: KAFKA-14108: Ensure both JUnit 4 and JUnit 5 tests run (#12441)

2022-07-29 Thread cadonna
This is an automated email from the ASF dual-hosted git repository.

cadonna pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 54af64c33a KAFKA-14108: Ensure both JUnit 4 and JUnit 5 tests run 
(#12441)
54af64c33a is described below

commit 54af64c33a1796bcea8a9990aa61c804d9fc0df4
Author: Christo Lolov 
AuthorDate: Fri Jul 29 16:21:25 2022 +0100

KAFKA-14108: Ensure both JUnit 4 and JUnit 5 tests run (#12441)

When the migration of the Streams project to JUnit 5 started with PR 
#12285, we discovered that the migrated tests were not run by the PR builds. 
This PR ensures that Streams' tests that are written in JUnit 4 and JUnit 5 are 
run in the PR builds.

Co-authored-by: Divij Vaidya 

Reviewers: Ismael Juma , Bruno Cadonna 

---
 build.gradle   | 33 ++
 .../integration/AdjustStreamThreadCountTest.java   |  5 ++--
 .../integration/EmitOnChangeIntegrationTest.java   |  5 ++--
 .../FineGrainedAutoResetIntegrationTest.java   |  5 ++--
 .../integration/GlobalKTableIntegrationTest.java   |  5 ++--
 .../integration/GlobalThreadShutDownOrderTest.java |  5 ++--
 ...ighAvailabilityTaskAssignorIntegrationTest.java |  5 ++--
 .../streams/integration/IQv2IntegrationTest.java   |  5 ++--
 .../integration/InternalTopicIntegrationTest.java  |  5 ++--
 .../KStreamAggregationDedupIntegrationTest.java|  5 ++--
 .../KStreamAggregationIntegrationTest.java |  5 ++--
 ...yInnerJoinCustomPartitionerIntegrationTest.java |  5 ++--
 .../KTableSourceTopicRestartIntegrationTest.java   |  8 ++
 .../integration/RackAwarenessIntegrationTest.java  |  5 ++--
 .../integration/RestoreIntegrationTest.java|  5 ++--
 15 files changed, 56 insertions(+), 50 deletions(-)

diff --git a/build.gradle b/build.gradle
index b228428441..54068f2977 100644
--- a/build.gradle
+++ b/build.gradle
@@ -323,7 +323,7 @@ subprojects {
   }
 
   // Remove the relevant project name once it's converted to JUnit 5
-  def shouldUseJUnit5 = !(["runtime", "streams"].contains(it.project.name))
+  def shouldUseJUnit5 = !(["runtime"].contains(it.project.name))
 
   def testLoggingEvents = ["passed", "skipped", "failed"]
   def testShowStandardStreams = false
@@ -466,8 +466,18 @@ subprojects {
 exclude testsToExclude
 
 if (shouldUseJUnit5) {
-  useJUnitPlatform {
-includeTags "integration"
+  if (project.name == 'streams') {
+useJUnitPlatform {
+  includeTags "integration"
+  includeTags "org.apache.kafka.test.IntegrationTest"
+ // Both engines are needed to run JUnit 4 tests alongside JUnit 5 
tests.
+  // junit-vintage (JUnit 4) can be removed once the JUnit 4 migration 
is complete.
+  includeEngines "junit-vintage", "junit-jupiter"
+}
+  } else {
+useJUnitPlatform {
+  includeTags "integration"
+}
   }
 } else {
   useJUnit {
@@ -503,8 +513,18 @@ subprojects {
 exclude testsToExclude
 
 if (shouldUseJUnit5) {
-  useJUnitPlatform {
-excludeTags "integration"
+  if (project.name == 'streams') {
+useJUnitPlatform {
+  excludeTags "integration"
+  excludeTags "org.apache.kafka.test.IntegrationTest"
+ // Both engines are needed to run JUnit 4 tests alongside JUnit 5 
tests.
+  // junit-vintage (JUnit 4) can be removed once the JUnit 4 migration 
is complete.
+  includeEngines "junit-vintage", "junit-jupiter"
+}
+  } else {
+useJUnitPlatform {
+  excludeTags "integration"
+}
   }
 } else {
   useJUnit {
@@ -1834,11 +1854,12 @@ project(':streams') {
 
 // testCompileOnly prevents streams from exporting a dependency on 
test-utils, which would cause a dependency cycle
 testCompileOnly project(':streams:test-utils')
+
 testImplementation project(':clients').sourceSets.test.output
 testImplementation project(':core')
 testImplementation project(':core').sourceSets.test.output
 testImplementation libs.log4j
-testImplementation libs.junitJupiterApi
+testImplementation libs.junitJupiter
 testImplementation libs.junitVintageEngine
 testImplementation libs.easymock
 testImplementation libs.powermockJunit4
diff --git 
a/streams/src/test/java/org/apache/kafka/streams/integration/AdjustStreamThreadCountTest.java
 
b/streams/src/test/java/org/apache/kafka/streams/integration/AdjustStreamThreadCountTest.java
index bf64068b94..59683e4460 100644
--- 
a/streams/src/test/java/org/apache/kafka/streams/integration/AdjustStreamThreadCountTest.java
+++ 
b/streams/src/test/java/org/apache/kafka/streams/integration/AdjustStreamThreadCountTest.java
@@ -32,13 +32,12 @@ import org.apache.kafka.streams.kstream.Transformer;
 import org.apache.kafka.streams.processor.ProcessorContext;
 import 

[kafka] branch trunk updated: MINOR: Remove code of removed metric (#12453)

2022-07-29 Thread cadonna
This is an automated email from the ASF dual-hosted git repository.

cadonna pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 5f7c99dd77 MINOR: Remove code of removed metric (#12453)
5f7c99dd77 is described below

commit 5f7c99dd77fc71982f8664d99bb3ce1b4924d34c
Author: Bruno Cadonna 
AuthorDate: Fri Jul 29 16:53:01 2022 +0200

MINOR: Remove code of removed metric (#12453)

When we removed metric skipped-records in 3.0 we missed to
remove some code related to that metric.

Reviewer: Guozhang Wang 
---
 .../processor/internals/metrics/ThreadMetrics.java   | 16 
 .../processor/internals/metrics/ThreadMetricsTest.java   |  2 --
 2 files changed, 18 deletions(-)

diff --git 
a/streams/src/main/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetrics.java
 
b/streams/src/main/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetrics.java
index 9c3e809497..eda173e532 100644
--- 
a/streams/src/main/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetrics.java
+++ 
b/streams/src/main/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetrics.java
@@ -45,7 +45,6 @@ public class ThreadMetrics {
 private static final String PUNCTUATE = "punctuate";
 private static final String CREATE_TASK = "task-created";
 private static final String CLOSE_TASK = "task-closed";
-private static final String SKIP_RECORD = "skipped-records";
 private static final String BLOCKED_TIME = "blocked-time-ns-total";
 private static final String THREAD_START_TIME = "thread-start-time";
 
@@ -79,9 +78,6 @@ public class ThreadMetrics {
 private static final String PUNCTUATE_RATE_DESCRIPTION = RATE_DESCRIPTION 
+ PUNCTUATE_DESCRIPTION;
 private static final String PUNCTUATE_AVG_LATENCY_DESCRIPTION = "The 
average punctuate latency";
 private static final String PUNCTUATE_MAX_LATENCY_DESCRIPTION = "The 
maximum punctuate latency";
-private static final String SKIP_RECORDS_DESCRIPTION = "skipped records";
-private static final String SKIP_RECORD_TOTAL_DESCRIPTION = 
TOTAL_DESCRIPTION + SKIP_RECORDS_DESCRIPTION;
-private static final String SKIP_RECORD_RATE_DESCRIPTION = 
RATE_DESCRIPTION + SKIP_RECORDS_DESCRIPTION;
 private static final String COMMIT_OVER_TASKS_DESCRIPTION =
 "calls to commit over all tasks assigned to one stream thread";
 private static final String COMMIT_OVER_TASKS_TOTAL_DESCRIPTION = 
TOTAL_DESCRIPTION + COMMIT_OVER_TASKS_DESCRIPTION;
@@ -123,18 +119,6 @@ public class ThreadMetrics {
 );
 }
 
-public static Sensor skipRecordSensor(final String threadId,
-  final StreamsMetricsImpl 
streamsMetrics) {
-return invocationRateAndCountSensor(
-threadId,
-SKIP_RECORD,
-SKIP_RECORD_RATE_DESCRIPTION,
-SKIP_RECORD_TOTAL_DESCRIPTION,
-RecordingLevel.INFO,
-streamsMetrics
-);
-}
-
 public static Sensor commitSensor(final String threadId,
   final StreamsMetricsImpl streamsMetrics) 
{
 return invocationRateAndCountAndAvgAndMaxLatencySensor(
diff --git 
a/streams/src/test/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetricsTest.java
 
b/streams/src/test/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetricsTest.java
index 3d2aaa20c8..2bbb6acb2a 100644
--- 
a/streams/src/test/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetricsTest.java
+++ 
b/streams/src/test/java/org/apache/kafka/streams/processor/internals/metrics/ThreadMetricsTest.java
@@ -371,8 +371,6 @@ public class ThreadMetricsTest {
 assertThat(sensor, is(expectedSensor));
 }
 }
-
-@Test
 public void shouldGetCreateTaskSensor() {
 final String operation = "task-created";
 final String totalDescription = "The total number of newly created 
tasks";