This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 1cdd82391e Simplify conditions on len() in other providers (#33569)
1cdd82391e is described below

commit 1cdd82391e0f7a24ab7f0badbe8f44a54f51d757
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Mon Aug 21 05:35:12 2023 +0000

    Simplify conditions on len() in other providers (#33569)
---
 airflow/providers/cncf/kubernetes/hooks/kubernetes.py        | 4 ++--
 airflow/providers/cncf/kubernetes/utils/pod_manager.py       | 6 +++---
 airflow/providers/databricks/hooks/databricks_sql.py         | 2 +-
 airflow/providers/databricks/operators/databricks_sql.py     | 2 +-
 airflow/providers/databricks/sensors/databricks_partition.py | 4 ++--
 airflow/providers/datadog/sensors/datadog.py                 | 2 +-
 airflow/providers/docker/operators/docker.py                 | 2 +-
 airflow/providers/imap/hooks/imap.py                         | 2 +-
 airflow/providers/openlineage/utils/sql.py                   | 2 +-
 airflow/providers/oracle/transfers/oracle_to_oracle.py       | 6 ++----
 airflow/providers/plexus/operators/job.py                    | 2 +-
 airflow/providers/ssh/hooks/ssh.py                           | 2 +-
 12 files changed, 17 insertions(+), 19 deletions(-)

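Context for the diff below: every hunk applies the same truthiness idiom
recommended by PEP 8. Empty sequences, empty dicts, and None are all falsy,
so an explicit length comparison is redundant. A minimal sketch of the
equivalence (illustrative values, not taken from the commit):

    lst = []
    assert (len(lst) == 0) == (not lst)    # empty list: both checks agree
    lst.append("item")
    assert (len(lst) > 0) == bool(lst)     # non-empty list: both checks agree
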
diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
index ddb8cb27ad..d4b09f7d11 100644
--- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
@@ -191,7 +191,7 @@ class KubernetesHook(BaseHook, PodOperatorHookProtocol):
         cluster_context = self._coalesce_param(self.cluster_context, self._get_field("cluster_context"))
         kubeconfig_path = self._coalesce_param(self.config_file, self._get_field("kube_config_path"))
         kubeconfig = self._get_field("kube_config")
-        num_selected_configuration = len([o for o in [in_cluster, kubeconfig, kubeconfig_path] if o])
+        num_selected_configuration = sum(1 for o in [in_cluster, kubeconfig, kubeconfig_path] if o)
 
         if num_selected_configuration > 1:
             raise AirflowException(
@@ -474,7 +474,7 @@ class AsyncKubernetesHook(KubernetesHook):
         kubeconfig_path = self._coalesce_param(self.config_file, await self._get_field("kube_config_path"))
         kubeconfig = await self._get_field("kube_config")
 
-        num_selected_configuration = len([o for o in [in_cluster, kubeconfig, kubeconfig_path] if o])
+        num_selected_configuration = sum(1 for o in [in_cluster, kubeconfig, kubeconfig_path] if o)
 
         if num_selected_configuration > 1:
             raise AirflowException(
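
The sum(1 for ... if ...) form counts the truthy entries lazily instead of
materializing a throwaway list only to take its length. A sketch with
hypothetical configuration values:

    in_cluster, kubeconfig, kubeconfig_path = None, "~/.kube/config", None
    num_selected = sum(1 for o in [in_cluster, kubeconfig, kubeconfig_path] if o)
    assert num_selected == 1    # only the one truthy source is counted
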
diff --git a/airflow/providers/cncf/kubernetes/utils/pod_manager.py b/airflow/providers/cncf/kubernetes/utils/pod_manager.py
index 650be681d3..139befdbff 100644
--- a/airflow/providers/cncf/kubernetes/utils/pod_manager.py
+++ b/airflow/providers/cncf/kubernetes/utils/pod_manager.py
@@ -443,9 +443,7 @@ class PodManager(LoggingMixin):
         """
         pod_logging_statuses = []
         all_containers = self.get_container_names(pod)
-        if len(all_containers) == 0:
-            self.log.error("Could not retrieve containers for the pod: %s", 
pod.metadata.name)
-        else:
+        if all_containers:
             if isinstance(container_logs, str):
                 # fetch logs only for requested container if only one container is provided
                 if container_logs in all_containers:
@@ -490,6 +488,8 @@ class PodManager(LoggingMixin):
                     self.log.error(
                         "Invalid type %s specified for container names input 
parameter", type(container_logs)
                     )
+        else:
+            self.log.error("Could not retrieve containers for the pod: %s", 
pod.metadata.name)
 
         return pod_logging_statuses
 
diff --git a/airflow/providers/databricks/hooks/databricks_sql.py b/airflow/providers/databricks/hooks/databricks_sql.py
index 31c816f39b..90b17d8ab0 100644
--- a/airflow/providers/databricks/hooks/databricks_sql.py
+++ b/airflow/providers/databricks/hooks/databricks_sql.py
@@ -92,7 +92,7 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
         if "endpoints" not in result:
             raise AirflowException("Can't list Databricks SQL endpoints")
         lst = [endpoint for endpoint in result["endpoints"] if 
endpoint["name"] == endpoint_name]
-        if len(lst) == 0:
+        if not lst:
             raise AirflowException(f"Can't f Databricks SQL endpoint with name 
'{endpoint_name}'")
         return lst[0]
 
diff --git a/airflow/providers/databricks/operators/databricks_sql.py b/airflow/providers/databricks/operators/databricks_sql.py
index 2561b380fa..a209ed7c18 100644
--- a/airflow/providers/databricks/operators/databricks_sql.py
+++ b/airflow/providers/databricks/operators/databricks_sql.py
@@ -297,7 +297,7 @@ class DatabricksCopyIntoOperator(BaseOperator):
         escape_key: bool = True,
     ) -> str:
         formatted_opts = ""
-        if opts is not None and len(opts) > 0:
+        if opts:
             pairs = [
                 f"{escaper.escape_item(k) if escape_key else k} = 
{escaper.escape_item(v)}"
                 for k, v in opts.items()
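
Here "if opts:" folds the two-part guard into a single test: for a value
that is either a dict or None, both None and an empty dict are falsy, so
the forms are equivalent. A sketch with hypothetical opts values:

    for opts in (None, {}, {"header": "true"}):
        assert bool(opts) == (opts is not None and len(opts) > 0)
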
diff --git a/airflow/providers/databricks/sensors/databricks_partition.py b/airflow/providers/databricks/sensors/databricks_partition.py
index fe50c6e1b3..98ad0f461a 100644
--- a/airflow/providers/databricks/sensors/databricks_partition.py
+++ b/airflow/providers/databricks/sensors/databricks_partition.py
@@ -184,7 +184,7 @@ class DatabricksPartitionSensor(BaseSensorOperator):
         if len(partition_columns) < 1:
             raise AirflowException(f"Table {table_name} does not have 
partitions")
         formatted_opts = ""
-        if opts is not None and len(opts) > 0:
+        if opts:
             output_list = []
             for partition_col, partition_value in opts.items():
                 if escape_key:
@@ -217,7 +217,7 @@ class DatabricksPartitionSensor(BaseSensorOperator):
         """Checks the table partitions and returns the results."""
         partition_result = self._check_table_partitions()
         self.log.debug("Partition sensor result: %s", partition_result)
-        if len(partition_result) >= 1:
+        if partition_result:
             return True
         else:
             raise AirflowException(f"Specified partition(s): {self.partitions} 
were not found.")
diff --git a/airflow/providers/datadog/sensors/datadog.py b/airflow/providers/datadog/sensors/datadog.py
index da80049e66..b5da363e84 100644
--- a/airflow/providers/datadog/sensors/datadog.py
+++ b/airflow/providers/datadog/sensors/datadog.py
@@ -96,4 +96,4 @@ class DatadogSensor(BaseSensorOperator):
             return self.response_check(response)
 
         # If no check was inserted, assume any event that matched yields true.
-        return len(response) > 0
+        return bool(response)
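
Since poke() must return a bool, bool(response) makes the conversion
explicit rather than deriving it from a length. A sketch with a
hypothetical query result:

    response = [{"event": "matched"}]     # hypothetical Datadog result
    assert bool(response) is True         # same outcome as len(response) > 0
    assert bool([]) is False
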
diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py
index 70fc299421..96f78baa9b 100644
--- a/airflow/providers/docker/operators/docker.py
+++ b/airflow/providers/docker/operators/docker.py
@@ -413,7 +413,7 @@ class DockerOperator(BaseOperator):
             if self.retrieve_output:
                 return self._attempt_to_retrieve_result()
             elif self.do_xcom_push:
-                if len(log_lines) == 0:
+                if not log_lines:
                     return None
                 try:
                     if self.xcom_all:
diff --git a/airflow/providers/imap/hooks/imap.py b/airflow/providers/imap/hooks/imap.py
index ea62a46e3a..3e214a5fea 100644
--- a/airflow/providers/imap/hooks/imap.py
+++ b/airflow/providers/imap/hooks/imap.py
@@ -130,7 +130,7 @@ class ImapHook(BaseHook):
         mail_attachments = self._retrieve_mails_attachments_by_name(
             name, check_regex, True, mail_folder, mail_filter
         )
-        return len(mail_attachments) > 0
+        return bool(mail_attachments)
 
     def retrieve_mail_attachments(
         self,
diff --git a/airflow/providers/openlineage/utils/sql.py b/airflow/providers/openlineage/utils/sql.py
index 5d2fa80cd6..3c87b04bb6 100644
--- a/airflow/providers/openlineage/utils/sql.py
+++ b/airflow/providers/openlineage/utils/sql.py
@@ -73,7 +73,7 @@ class TableSchema:
         return Dataset(
             namespace=namespace,
             name=name,
-            facets={"schema": SchemaDatasetFacet(fields=self.fields)} if 
len(self.fields) > 0 else {},
+            facets={"schema": SchemaDatasetFacet(fields=self.fields)} if 
self.fields else {},
         )
 
 
diff --git a/airflow/providers/oracle/transfers/oracle_to_oracle.py b/airflow/providers/oracle/transfers/oracle_to_oracle.py
index 6762a5742f..192df8c78d 100644
--- a/airflow/providers/oracle/transfers/oracle_to_oracle.py
+++ b/airflow/providers/oracle/transfers/oracle_to_oracle.py
@@ -72,13 +72,11 @@ class OracleToOracleOperator(BaseOperator):
             target_fields = list(map(lambda field: field[0], cursor.description))
 
             rows_total = 0
-            rows = cursor.fetchmany(self.rows_chunk)
-            while rows:
-                rows_total += len(rows)
+            for rows in iter(lambda: cursor.fetchmany(self.rows_chunk), []):
                 dest_hook.bulk_insert_rows(
                     self.destination_table, rows, target_fields=target_fields, commit_every=self.rows_chunk
                 )
-                rows = cursor.fetchmany(self.rows_chunk)
+                rows_total += len(rows)
                 self.log.info("Total inserted: %s rows", rows_total)
 
             self.log.info("Finished data transfer.")
diff --git a/airflow/providers/plexus/operators/job.py b/airflow/providers/plexus/operators/job.py
index 5eb83f7b6b..21002dc076 100644
--- a/airflow/providers/plexus/operators/job.py
+++ b/airflow/providers/plexus/operators/job.py
@@ -144,7 +144,7 @@ class PlexusJobOperator(BaseOperator):
         :param hook: plexus hook object
         """
         missing_params = self.required_params - set(self.job_params)
-        if len(missing_params) > 0:
+        if missing_params:
             raise AirflowException(f"Missing the following required 
job_params: {', '.join(missing_params)}")
         params = {}
         for prm in self.job_params:
diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py
index 1db70b4b12..e40354e1ac 100644
--- a/airflow/providers/ssh/hooks/ssh.py
+++ b/airflow/providers/ssh/hooks/ssh.py
@@ -512,7 +512,7 @@ class SSHHook(BaseHook):
         while not channel.closed or channel.recv_ready() or channel.recv_stderr_ready():
             readq, _, _ = select([channel], [], [], cmd_timeout)
             if cmd_timeout is not None:
-                timedout = len(readq) == 0
+                timedout = not readq
             for recv in readq:
                 if recv.recv_ready():
                     output = stdout.channel.recv(len(recv.in_buffer))
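
select() returns three empty lists when the timeout expires with nothing
ready, so "not readq" is a direct test for the timeout case. A runnable
sketch of that behavior using a socket pair (a stand-in for the hook's
channel):

    import socket
    from select import select

    a, b = socket.socketpair()
    readq, _, _ = select([a], [], [], 0.1)
    assert not readq     # nothing readable yet -> treated as a timeout
    b.send(b"x")
    readq, _, _ = select([a], [], [], 0.1)
    assert readq         # data pending -> not a timeout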
