jolshan commented on a change in pull request #9769:
URL: https://github.com/apache/kafka/pull/9769#discussion_r563861257



##########
File path: core/src/main/scala/kafka/admin/TopicCommand.scala
##########
@@ -313,42 +313,57 @@ object TopicCommand extends Logging {
     }
 
     override def describeTopic(opts: TopicCommandOptions): Unit = {
-      val topics = getTopics(opts.topic, opts.excludeInternalTopics)
-      ensureTopicExists(topics, opts.topic, !opts.ifExists)
+      val topicId = opts.topicId.map(Uuid.fromString).filter(_ != 
Uuid.ZERO_UUID)
+      // if topicId is provided and not zero, will use topicId regardless of 
topic name

Review comment:
       I don't have an issue with this, but just curious why this was chosen 
over failing.

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1190,8 +1192,31 @@ class KafkaApis(val requestChannel: RequestChannel,
     val metadataRequest = request.body[MetadataRequest]
     val requestVersion = request.header.apiVersion
 
+    // Check if topicId is presented firstly.
+    val topicIds = metadataRequest.topicIds.asScala.toSet.filterNot(_ == 
Uuid.ZERO_UUID)
+    val supportedVersionTopicIds = if (config.interBrokerProtocolVersion >= 
KAFKA_2_8_IV1) topicIds else Set.empty[Uuid]
+
+    val unsupportedVersionTopicIds = topicIds.diff(supportedVersionTopicIds)

Review comment:
       I'm curious if this diff is needed. It seems that the above check will 
result in either all topicIds being left out or all of them being included.

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1213,8 +1238,7 @@ class KafkaApis(val requestChannel: RequestChannel,
     }
 
     val unauthorizedForCreateTopicMetadata = 
unauthorizedForCreateTopics.map(topic =>
-      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
isInternal(topic), util.Collections.emptyList()))
-
+      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), isInternal(topic), 
util.Collections.emptyList()))

Review comment:
       Does this potentially leak the topic name when we are using topic IDs?

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1223,7 +1247,7 @@ class KafkaApis(val requestChannel: RequestChannel,
         Set.empty[MetadataResponseTopic]
       else
         unauthorizedForDescribeTopics.map(topic =>
-          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
false, util.Collections.emptyList()))
+          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), false, util.Collections.emptyList()))

Review comment:
       Same here.

##########
File path: 
core/src/test/scala/integration/kafka/server/MetadataRequestBetweenDifferentIbpTest.scala
##########
@@ -0,0 +1,125 @@
+/**
+  * Licensed to the Apache Software Foundation (ASF) under one or more
+  * contributor license agreements.  See the NOTICE file distributed with
+  * this work for additional information regarding copyright ownership.
+  * The ASF licenses this file to You under the Apache License, Version 2.0
+  * (the "License"); you may not use this file except in compliance with
+  * the License.  You may obtain a copy of the License at
+  *
+  *    http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package integration.kafka.server
+
+import kafka.api.{ApiVersion, KAFKA_2_8_IV0, KAFKA_2_8_IV1}
+import kafka.network.SocketServer
+import kafka.server.{BaseRequestTest, KafkaConfig}
+import kafka.utils.TestUtils
+import kafka.zk.ZkVersion
+import org.apache.kafka.common.Uuid
+import org.apache.kafka.common.message.MetadataRequestData
+import org.apache.kafka.common.protocol.Errors
+import org.apache.kafka.common.requests.{MetadataRequest, MetadataResponse}
+import org.junit.jupiter.api.Assertions._
+import org.junit.jupiter.api.Test
+
+import scala.collection.{Map, Seq}
+
+class MetadataRequestBetweenDifferentIbpTest extends BaseRequestTest {

Review comment:
       This test is super cool and will be very useful when testing other 
request types like fetch. 

##########
File path: 
core/src/test/scala/integration/kafka/server/MetadataRequestBetweenDifferentIbpTest.scala
##########
@@ -0,0 +1,125 @@
+/**
+  * Licensed to the Apache Software Foundation (ASF) under one or more
+  * contributor license agreements.  See the NOTICE file distributed with
+  * this work for additional information regarding copyright ownership.
+  * The ASF licenses this file to You under the Apache License, Version 2.0
+  * (the "License"); you may not use this file except in compliance with
+  * the License.  You may obtain a copy of the License at
+  *
+  *    http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package integration.kafka.server
+
+import kafka.api.{ApiVersion, KAFKA_2_8_IV0, KAFKA_2_8_IV1}
+import kafka.network.SocketServer
+import kafka.server.{BaseRequestTest, KafkaConfig}
+import kafka.utils.TestUtils
+import kafka.zk.ZkVersion
+import org.apache.kafka.common.Uuid
+import org.apache.kafka.common.message.MetadataRequestData
+import org.apache.kafka.common.protocol.Errors
+import org.apache.kafka.common.requests.{MetadataRequest, MetadataResponse}
+import org.junit.jupiter.api.Assertions._
+import org.junit.jupiter.api.Test
+
+import scala.collection.{Map, Seq}
+
+class MetadataRequestBetweenDifferentIbpTest extends BaseRequestTest {
+
+  override def brokerCount: Int = 3
+  override def generateConfigs: Seq[KafkaConfig] = {
+    Seq(
+      createConfig(0, KAFKA_2_8_IV0),
+      createConfig(1, KAFKA_2_8_IV1),
+      createConfig(2, KAFKA_2_8_IV1)
+    )
+  }
+
+  @Test
+  def testTopicIdUnsupported(): Unit = {
+
+    val topic = "topic"
+
+    // Ensure controller version = KAFKA_2_8_IV1, and then create a topic
+    ensureControllerIn(Seq(1, 2))
+    createTopic(topic,  Map(0 -> Seq(1, 2, 0), 1 -> Seq(2, 0, 1)))
+
+    // We can get topicId from the controller
+    val resp1 = sendMetadataRequest(new MetadataRequest(requestData(topic, 
Uuid.ZERO_UUID), 10.toShort), controllerSocketServer)
+    val topicId = resp1.topicMetadata.iterator().next().topicId()
+    assertNotEquals(Uuid.ZERO_UUID, topicId)
+    assertNotNull(topicId)
+
+    // Send request to a broker whose version=KAFKA_2_8_IV0
+    val resp2 = sendMetadataRequest(new MetadataRequest(requestData(topic, 
topicId), 10.toShort), brokerSocketServer(0))
+    assertEquals(Errors.UNSUPPORTED_VERSION, 
resp2.topicMetadata.iterator().next().error())
+  }
+
+  @Test
+  def testUnknownTopicId(): Unit = {
+
+    val topic = "topic"
+
+    // Kill controller and restart until broker 2 become controller
+    ensureControllerIn(Seq(1, 2))
+    createTopic(topic, Map(0 -> Seq(1, 2, 0), 1 -> Seq(2, 0, 1)))
+
+    val resp1 = sendMetadataRequest(new MetadataRequest(requestData(topic, 
Uuid.ZERO_UUID), 10.toShort), controllerSocketServer)
+    val topicId = resp1.topicMetadata.iterator().next().topicId()
+
+    // We could still get topic metadata by topicId
+    val topicMetadata = sendMetadataRequest(new 
MetadataRequest(requestData(null, topicId), 10.toShort), controllerSocketServer)
+      .topicMetadata.iterator().next()
+    assertEquals(topicId, topicMetadata.topicId())
+    assertEquals(topic, topicMetadata.topic())
+
+    // Make the broker whose version=KAFKA_2_8_IV0 controller
+    ensureControllerIn(Seq(0))
+
+    // Restart the broker whose version=KAFKA_2_8_IV1, and the controller will 
send metadata request to it
+    killBroker(1)
+    restartDeadBrokers()
+
+    // Send request to a broker whose version=KAFKA_2_8_IV1 and restarted just 
now
+    val resp2 = sendMetadataRequest(new MetadataRequest(requestData(topic, 
topicId), 10.toShort), brokerSocketServer(1))
+    assertEquals(Errors.UNKNOWN_TOPIC_ID, 
resp2.topicMetadata.iterator().next().error())

Review comment:
       This scenario is a bit of a corner case and maybe not a huge deal either 
way, but does it make more sense to respond with UNKNOWN_TOPIC_ID or 
UNSUPPORTED_VERSION here? Maybe either is fine since we can expect 
UNKNOWN_TOPIC_ID or UNSUPPORTED_VERSION during an upgrade like this anyway.

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1213,8 +1238,7 @@ class KafkaApis(val requestChannel: RequestChannel,
     }
 
     val unauthorizedForCreateTopicMetadata = 
unauthorizedForCreateTopics.map(topic =>
-      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
isInternal(topic), util.Collections.emptyList()))
-
+      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), isInternal(topic), 
util.Collections.emptyList()))

Review comment:
       (and if we give ID when using name, we acknowledge it exists)

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1213,8 +1238,7 @@ class KafkaApis(val requestChannel: RequestChannel,
     }
 
     val unauthorizedForCreateTopicMetadata = 
unauthorizedForCreateTopics.map(topic =>
-      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
isInternal(topic), util.Collections.emptyList()))
-
+      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), isInternal(topic), 
util.Collections.emptyList()))

Review comment:
       Does this potentially leak the topic name when we are using topic IDs?

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1213,8 +1238,7 @@ class KafkaApis(val requestChannel: RequestChannel,
     }
 
     val unauthorizedForCreateTopicMetadata = 
unauthorizedForCreateTopics.map(topic =>
-      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
isInternal(topic), util.Collections.emptyList()))
-
+      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), isInternal(topic), 
util.Collections.emptyList()))

Review comment:
       (and if we give ID when using name, we acknowledge it exists)

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1223,7 +1247,7 @@ class KafkaApis(val requestChannel: RequestChannel,
         Set.empty[MetadataResponseTopic]
       else
         unauthorizedForDescribeTopics.map(topic =>
-          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
false, util.Collections.emptyList()))
+          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), false, util.Collections.emptyList()))

Review comment:
       Does this potentially leak the topic name when we are using topic IDs? 
And potentially acknowledge a topic exists if we give a valid id?

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1213,8 +1238,7 @@ class KafkaApis(val requestChannel: RequestChannel,
     }
 
     val unauthorizedForCreateTopicMetadata = 
unauthorizedForCreateTopics.map(topic =>
-      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
isInternal(topic), util.Collections.emptyList()))
-
+      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), isInternal(topic), 
util.Collections.emptyList()))

Review comment:
       I think we only care about authorizedForCreate when using topic names, 
but I just want to make sure we don't give a name/id away.

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1213,8 +1238,7 @@ class KafkaApis(val requestChannel: RequestChannel,
     }
 
     val unauthorizedForCreateTopicMetadata = 
unauthorizedForCreateTopics.map(topic =>
-      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
isInternal(topic), util.Collections.emptyList()))
-
+      metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), isInternal(topic), 
util.Collections.emptyList()))

Review comment:
       I think this usage is fine since we will only get here if we use the 
topic name version of the request and the topic does not exist.

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1223,7 +1247,7 @@ class KafkaApis(val requestChannel: RequestChannel,
         Set.empty[MetadataResponseTopic]
       else
         unauthorizedForDescribeTopics.map(topic =>
-          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
false, util.Collections.emptyList()))
+          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), false, util.Collections.emptyList()))

Review comment:
       Does this potentially acknowledge a topic exists if we give a valid id?

##########
File path: core/src/main/scala/kafka/server/KafkaApis.scala
##########
@@ -1223,7 +1247,7 @@ class KafkaApis(val requestChannel: RequestChannel,
         Set.empty[MetadataResponseTopic]
       else
         unauthorizedForDescribeTopics.map(topic =>
-          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
false, util.Collections.emptyList()))
+          metadataResponseTopic(Errors.TOPIC_AUTHORIZATION_FAILED, topic, 
metadataCache.getTopicId(topic), false, util.Collections.emptyList()))

Review comment:
       We should check some of the other error responses to make sure we use 
name/id appropriately




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Reply via email to