ivandasch commented on a change in pull request #9196:
URL: https://github.com/apache/kafka/pull/9196#discussion_r475086470



##########
File path: tests/kafkatest/benchmarks/core/benchmark_test.py
##########
@@ -88,7 +88,7 @@ def test_producer_throughput(self, acks, topic, num_producers=1, message_size=DE
         self.validate_versions(client_version, broker_version)
         self.start_kafka(security_protocol, security_protocol, broker_version, tls_version)
         # Always generate the same total amount of data
-        nrecords = int(self.target_data_size / message_size)
+        nrecords = int(self.target_data_size // message_size)

Review comment:
       No need for an explicit conversion to int (the result of `//` with two int operands is already an int)
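
       For illustration only (made-up numbers, not the test's actual values): floor division of two ints already returns an int, so `int(...)` would only matter if an operand could be a float.

```python
# Two int operands: // already yields an int, so int() around it is redundant.
nrecords = 1_000_000 // 100
print(nrecords, type(nrecords))    # 10000 <class 'int'>

# Caveat: with a float operand, // yields a float and int() would matter again.
print(1_000_000.0 // 100)          # 10000.0
```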

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -372,7 +372,7 @@ def current_position(self, tp):
 
     def owner(self, tp):
         with self.lock:
-            for handler in self.event_handlers.itervalues():
+            for handler in iter(self.event_handlers.values()):

Review comment:
       No need to create an iterator from an iterable
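
       A minimal sketch of the point (hypothetical dict, not the service's real handlers):

```python
# dict.values() returns a view that is already iterable in Python 3;
# wrapping it in iter() adds nothing.
event_handlers = {1: "handler-1", 2: "handler-2"}

for handler in event_handlers.values():
    print(handler)
```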

##########
File path: tests/kafkatest/tests/client/client_compatibility_features_test.py
##########
@@ -86,14 +88,14 @@ def invoke_compatibility_program(self, features):
                "--topic %s " % (self.zk.path.script("kafka-run-class.sh", 
node),
                                self.kafka.bootstrap_servers(),
                                len(self.kafka.nodes),
-                               self.topics.keys()[0]))
-        for k, v in features.iteritems():
+                               [*self.topics.keys()][0]))
+        for k, v in iter(features.items()):

Review comment:
       No need to create an iterator from an iterable

##########
File path: tests/kafkatest/tests/connect/connect_distributed_test.py
##########
@@ -420,11 +421,14 @@ def test_bounce(self, clean, connect_protocol):
             src_seqnos = [msg['seqno'] for msg in src_messages if msg['task'] == task]
             # Every seqno up to the largest one we ever saw should appear. Each seqno should only appear once because clean
             # bouncing should commit on rebalance.
-            src_seqno_max = max(src_seqnos)
+            if len(src_seqnos) == 0:
+                src_seqno_max = 0
+            else:
+                src_seqno_max = max(src_seqnos)
             self.logger.debug("Max source seqno: %d", src_seqno_max)
             src_seqno_counts = Counter(src_seqnos)
             missing_src_seqnos = sorted(set(range(src_seqno_max)).difference(set(src_seqnos)))
-            duplicate_src_seqnos = sorted([seqno for seqno,count in src_seqno_counts.iteritems() if count > 1])
+            duplicate_src_seqnos = sorted([seqno for seqno,count in iter(src_seqno_counts.items()) if count > 1])

Review comment:
       No need to create a list and an iterator; just `sorted(seqno for seqno, count in src_seqno_counts.items() if count > 1)`
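
       For illustration (made-up seqnos): `sorted()` accepts any iterable, so the generator can be passed straight in.

```python
from collections import Counter

src_seqno_counts = Counter([1, 1, 2, 3, 3, 3])

# No intermediate list and no iter() call; sorted() consumes the generator.
duplicate_src_seqnos = sorted(seqno for seqno, count in src_seqno_counts.items() if count > 1)
print(duplicate_src_seqnos)    # [1, 3]
```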

##########
File path: tests/kafkatest/services/performance/producer_performance.py
##########
@@ -78,7 +78,7 @@ def start_cmd(self, node):
             'bootstrap_servers': self.kafka.bootstrap_servers(self.security_config.security_protocol),
             'client_id': self.client_id,
             'kafka_run_class': self.path.script("kafka-run-class.sh", node),
-            'metrics_props': ' '.join(["%s=%s" % (k, v) for k, v in self.http_metrics_client_configs.iteritems()])
+            'metrics_props': ' '.join(["%s=%s" % (k, v) for k, v in iter(self.http_metrics_client_configs.items())])

Review comment:
       No need to create an iterator from `items()`; `join` accepts a generator directly, so there is no need to build a list either.
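
       A quick sketch with a made-up config dict standing in for `http_metrics_client_configs`:

```python
configs = {"metrics.url": "http://localhost:8080/metrics", "client.id": "perf-1"}

# str.join consumes the generator directly; no list and no iter() needed.
metrics_props = ' '.join("%s=%s" % (k, v) for k, v in configs.items())
print(metrics_props)    # metrics.url=http://localhost:8080/metrics client.id=perf-1
```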

##########
File path: tests/kafkatest/services/monitor/http.py
##########
@@ -114,7 +114,7 @@ def metrics(self, host=None, client_id=None, name=None, group=None, tags=None):
         Get any collected metrics that match the specified parameters, yielding each as a tuple of
         (key, [<timestamp, value>, ...]) values.
         """
-        for k, values in self._http_metrics.iteritems():
+        for k, values in iter(self._http_metrics.items()):

Review comment:
       Redundant conversion to an iterator (`.items()` is already iterable)

##########
File path: tests/kafkatest/services/security/security_config.py
##########
@@ -362,7 +362,7 @@ def props(self, prefix=''):
             return ""
         if self.has_sasl and not self.static_jaas_conf and 'sasl.jaas.config' not in self.properties:
             raise Exception("JAAS configuration property has not yet been initialized")
-        config_lines = (prefix + key + "=" + value for key, value in self.properties.iteritems())
+        config_lines = (prefix + key + "=" + value for key, value in iter(self.properties.items()))

Review comment:
       No need to create an iterator from an iterable

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -361,7 +361,7 @@ def clean_node(self, node):
 
     def current_assignment(self):
         with self.lock:
-            return { handler.node: handler.current_assignment() for handler in self.event_handlers.itervalues() }
+            return { handler.node: handler.current_assignment() for handler in iter(self.event_handlers.values()) }

Review comment:
       No need to create an iterator from an iterable

##########
File path: tests/kafkatest/services/monitor/http.py
##########
@@ -154,7 +154,7 @@ def do_POST(self):
             name = raw_metric['name']
             group = raw_metric['group']
             # Convert to tuple of pairs because dicts & lists are unhashable
-            tags = tuple([(k, v) for k, v in raw_metric['tags'].iteritems()]),
+            tags = tuple([(k, v) for k, v in iter(raw_metric['tags'].items())]),

Review comment:
       Same as above, plus a tuple can be constructed from a generator directly; no need to create a list for that.
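
       Sketch with made-up tags standing in for `raw_metric['tags']`:

```python
raw_tags = {"client-id": "consumer-1", "broker-id": "0"}

# tuple() accepts a generator, so no intermediate list is built.
tags = tuple((k, v) for k, v in raw_tags.items())
print(tags)    # (('client-id', 'consumer-1'), ('broker-id', '0'))
```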

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -386,33 +386,33 @@ def last_commit(self, tp):
 
     def total_consumed(self):
         with self.lock:
-            return sum(handler.total_consumed for handler in self.event_handlers.itervalues())
+            return sum(handler.total_consumed for handler in iter(self.event_handlers.values()))

Review comment:
       Same as above

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -386,33 +386,33 @@ def last_commit(self, tp):
 
     def total_consumed(self):
         with self.lock:
-            return sum(handler.total_consumed for handler in self.event_handlers.itervalues())
+            return sum(handler.total_consumed for handler in iter(self.event_handlers.values()))
 
     def num_rebalances(self):
         with self.lock:
-            return max(handler.assigned_count for handler in self.event_handlers.itervalues())
+            return max(handler.assigned_count for handler in iter(self.event_handlers.values()))
 
     def num_revokes_for_alive(self, keep_alive=1):
         with self.lock:
-            return max([handler.revoked_count for handler in self.event_handlers.itervalues()
+            return max([handler.revoked_count for handler in iter(self.event_handlers.values())

Review comment:
       Same as above

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -386,33 +386,33 @@ def last_commit(self, tp):
 
     def total_consumed(self):
         with self.lock:
-            return sum(handler.total_consumed for handler in self.event_handlers.itervalues())
+            return sum(handler.total_consumed for handler in iter(self.event_handlers.values()))
 
     def num_rebalances(self):
         with self.lock:
-            return max(handler.assigned_count for handler in self.event_handlers.itervalues())
+            return max(handler.assigned_count for handler in iter(self.event_handlers.values()))

Review comment:
       Same as above.

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -386,33 +386,33 @@ def last_commit(self, tp):
 
     def total_consumed(self):
         with self.lock:
-            return sum(handler.total_consumed for handler in self.event_handlers.itervalues())
+            return sum(handler.total_consumed for handler in iter(self.event_handlers.values()))
 
     def num_rebalances(self):
         with self.lock:
-            return max(handler.assigned_count for handler in self.event_handlers.itervalues())
+            return max(handler.assigned_count for handler in iter(self.event_handlers.values()))
 
     def num_revokes_for_alive(self, keep_alive=1):
         with self.lock:
-            return max([handler.revoked_count for handler in self.event_handlers.itervalues()
+            return max([handler.revoked_count for handler in iter(self.event_handlers.values())
                        if handler.idx <= keep_alive])
 
     def joined_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())
                     if handler.state == ConsumerState.Joined]
 
     def rebalancing_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())
                     if handler.state == ConsumerState.Rebalancing]
 
     def dead_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())

Review comment:
       Same as above

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -386,33 +386,33 @@ def last_commit(self, tp):
 
     def total_consumed(self):
         with self.lock:
-            return sum(handler.total_consumed for handler in self.event_handlers.itervalues())
+            return sum(handler.total_consumed for handler in iter(self.event_handlers.values()))
 
     def num_rebalances(self):
         with self.lock:
-            return max(handler.assigned_count for handler in self.event_handlers.itervalues())
+            return max(handler.assigned_count for handler in iter(self.event_handlers.values()))
 
     def num_revokes_for_alive(self, keep_alive=1):
         with self.lock:
-            return max([handler.revoked_count for handler in self.event_handlers.itervalues()
+            return max([handler.revoked_count for handler in iter(self.event_handlers.values())
                        if handler.idx <= keep_alive])
 
     def joined_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())
                     if handler.state == ConsumerState.Joined]
 
     def rebalancing_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())

Review comment:
       Same as above

##########
File path: tests/kafkatest/services/verifiable_consumer.py
##########
@@ -386,33 +386,33 @@ def last_commit(self, tp):
 
     def total_consumed(self):
         with self.lock:
-            return sum(handler.total_consumed for handler in self.event_handlers.itervalues())
+            return sum(handler.total_consumed for handler in iter(self.event_handlers.values()))
 
     def num_rebalances(self):
         with self.lock:
-            return max(handler.assigned_count for handler in self.event_handlers.itervalues())
+            return max(handler.assigned_count for handler in iter(self.event_handlers.values()))
 
     def num_revokes_for_alive(self, keep_alive=1):
         with self.lock:
-            return max([handler.revoked_count for handler in self.event_handlers.itervalues()
+            return max([handler.revoked_count for handler in iter(self.event_handlers.values())
                        if handler.idx <= keep_alive])
 
     def joined_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())
                     if handler.state == ConsumerState.Joined]
 
     def rebalancing_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())
                     if handler.state == ConsumerState.Rebalancing]
 
     def dead_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())
                     if handler.state == ConsumerState.Dead]
 
     def alive_nodes(self):
         with self.lock:
-            return [handler.node for handler in self.event_handlers.itervalues()
+            return [handler.node for handler in iter(self.event_handlers.values())

Review comment:
       Same as above

##########
File path: tests/kafkatest/tests/client/client_compatibility_features_test.py
##########
@@ -86,14 +88,14 @@ def invoke_compatibility_program(self, features):
                "--topic %s " % (self.zk.path.script("kafka-run-class.sh", 
node),
                                self.kafka.bootstrap_servers(),
                                len(self.kafka.nodes),
-                               self.topics.keys()[0]))
-        for k, v in features.iteritems():
+                               [*self.topics.keys()][0]))

Review comment:
       I suppose that `next(iter(self.topics.keys()))` is more readable than this mix of brackets and stars
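
       Side by side, with a hypothetical topics dict:

```python
topics = {"test_topic": {"partitions": 1}}

first = next(iter(topics.keys()))    # suggested: no list is materialized
also_first = [*topics.keys()][0]     # current patch: builds a full list first
assert first == also_first == "test_topic"
```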

##########
File path: tests/kafkatest/tests/connect/connect_distributed_test.py
##########
@@ -420,11 +421,14 @@ def test_bounce(self, clean, connect_protocol):
             src_seqnos = [msg['seqno'] for msg in src_messages if msg['task'] == task]
             # Every seqno up to the largest one we ever saw should appear. Each seqno should only appear once because clean
             # bouncing should commit on rebalance.
-            src_seqno_max = max(src_seqnos)
+            if len(src_seqnos) == 0:
+                src_seqno_max = 0
+            else:
+                src_seqno_max = max(src_seqnos)

Review comment:
       `src_seqno_max = max(src_seqnos) if len(src_seqnos) else 0` is more readable
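
       The one-liner, checked against the empty-list edge case this change guards (as an aside, Python 3.4+ `max` also takes a `default=` keyword, though that is not what this review proposes):

```python
src_seqnos = []

src_seqno_max = max(src_seqnos) if len(src_seqnos) else 0

# Equivalent alternative on Python 3.4+:
assert src_seqno_max == max(src_seqnos, default=0) == 0
```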

##########
File path: tests/kafkatest/version.py
##########
@@ -49,6 +49,34 @@ def __str__(self):
         else:
             return LooseVersion.__str__(self)
 
+    def __eq__(self, other):

Review comment:
       If you start to refactor this class, maybe also refactor the call to the super constructor? `LooseVersion` is a normal class now (in Python 3), so just `super().__init__(version_string)` works.
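
       A minimal sketch of the suggested call, loosely modeled on the class in `version.py` (details here are illustrative, not the file's actual code):

```python
from distutils.version import LooseVersion

class KafkaVersion(LooseVersion):
    def __init__(self, version_string):
        # Python 3 style: no LooseVersion.__init__(self, version_string) needed.
        super().__init__(version_string)

print(KafkaVersion("2.6.0").version)    # [2, 6, 0]
```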

##########
File path: tests/kafkatest/tests/end_to_end.py
##########
@@ -87,7 +85,7 @@ def on_record_consumed(self, record, node):
 
     def await_consumed_offsets(self, last_acked_offsets, timeout_sec):
         def has_finished_consuming():
-            for partition, offset in last_acked_offsets.iteritems():
+            for partition, offset in iter(last_acked_offsets.items()):

Review comment:
       No need to create an iterator

##########
File path: tests/kafkatest/tests/verifiable_consumer_test.py
##########
@@ -40,7 +40,7 @@ def _all_partitions(self, topic, num_partitions):
 
     def _partitions(self, assignment):
         partitions = []
-        for parts in assignment.itervalues():
+        for parts in iter(assignment.values()):

Review comment:
       No need to create an iterator

##########
File path: tests/kafkatest/tests/streams/streams_broker_bounce_test.py
##########
@@ -119,7 +119,7 @@ def __init__(self, test_context):
     def fail_broker_type(self, failure_mode, broker_type):
         # Pick a random topic and bounce it's leader
         topic_index = randint(0, len(self.topics.keys()) - 1)
-        topic = self.topics.keys()[topic_index]
+        topic = [*self.topics.keys()][topic_index]

Review comment:
       `list(self.topics.keys())[topic_index]` is more readable
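
       Side by side, with a throwaway dict standing in for `self.topics`:

```python
from random import randint

topics = {"topicA": {}, "topicB": {}, "topicC": {}}
topic_index = randint(0, len(topics) - 1)

topic = list(topics.keys())[topic_index]      # suggested: intent is obvious
same_topic = [*topics.keys()][topic_index]    # current patch: same result
assert topic == same_topic
```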

##########
File path: tests/kafkatest/tests/client/quota_test.py
##########
@@ -162,7 +162,7 @@ def test_quota(self, quota_type, override_quota=True, producer_num=1, consumer_n
             jmx_attributes=['bytes-consumed-rate'], version=client_version)
         consumer.run()
 
-        for idx, messages in consumer.messages_consumed.iteritems():
+        for idx, messages in iter(consumer.messages_consumed.items()):

Review comment:
       No need to create an iterator




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

