This is an automated email from the ASF dual-hosted git repository.
taragolis pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 633217c842 Simplify conditions on len() in providers/amazon (#33565)
633217c842 is described below
commit 633217c84217f925c4c0a92e0b24ba314b64281b
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Thu Aug 24 22:13:35 2023 +0000
Simplify conditions on len() in providers/amazon (#33565)
---
airflow/providers/amazon/aws/hooks/emr.py | 2 +-
airflow/providers/amazon/aws/hooks/s3.py | 2 +-
airflow/providers/amazon/aws/hooks/sagemaker.py | 9 ++++-----
airflow/providers/amazon/aws/operators/sagemaker.py | 2 +-
airflow/providers/amazon/aws/sensors/batch.py | 4 ++--
airflow/providers/amazon/aws/sensors/s3.py | 2 +-
airflow/providers/amazon/aws/sensors/sqs.py | 4 ++--
7 files changed, 12 insertions(+), 13 deletions(-)
diff --git a/airflow/providers/amazon/aws/hooks/emr.py
b/airflow/providers/amazon/aws/hooks/emr.py
index 54127d98a3..1ff2129277 100644
--- a/airflow/providers/amazon/aws/hooks/emr.py
+++ b/airflow/providers/amazon/aws/hooks/emr.py
@@ -291,7 +291,7 @@ class EmrServerlessHook(AwsBaseHook):
for r in iterator:
job_ids = [jr["id"] for jr in r["jobRuns"]]
count += len(job_ids)
- if len(job_ids) > 0:
+ if job_ids:
self.log.info(
"Cancelling %s pending job(s) for the application %s so that it can be stopped",
len(job_ids),
diff --git a/airflow/providers/amazon/aws/hooks/s3.py
b/airflow/providers/amazon/aws/hooks/s3.py
index 4e44a031b5..e2a46f54eb 100644
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -503,7 +503,7 @@ class S3Hook(AwsBaseHook):
if wildcard_match:
keys = await self.get_file_metadata_async(client, bucket_name, key)
key_matches = [k for k in keys if fnmatch.fnmatch(k["Key"], key)]
- if len(key_matches) == 0:
+ if not key_matches:
return False
else:
obj = await self.get_head_object_async(client, key, bucket_name)
diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py
b/airflow/providers/amazon/aws/hooks/sagemaker.py
index 364e073b1a..40b9b55c54 100644
--- a/airflow/providers/amazon/aws/hooks/sagemaker.py
+++ b/airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -81,7 +81,7 @@ def secondary_training_status_changed(current_job_description: dict, prev_job_de
:return: Whether the secondary status message of a training job changed or
not.
"""
current_secondary_status_transitions = current_job_description.get("SecondaryStatusTransitions")
- if current_secondary_status_transitions is None or len(current_secondary_status_transitions) == 0:
+ if not current_secondary_status_transitions:
return False
prev_job_secondary_status_transitions = (
@@ -90,8 +90,7 @@ def secondary_training_status_changed(current_job_description: dict, prev_job_de
last_message = (
prev_job_secondary_status_transitions[-1]["StatusMessage"]
- if prev_job_secondary_status_transitions is not None
- and len(prev_job_secondary_status_transitions) > 0
+ if prev_job_secondary_status_transitions
else ""
)
@@ -111,7 +110,7 @@ def secondary_training_status_message(
:return: Job status string to be printed.
"""
current_transitions = job_description.get("SecondaryStatusTransitions")
- if current_transitions is None or len(current_transitions) == 0:
+ if not current_transitions:
return ""
prev_transitions_num = 0
@@ -584,7 +583,7 @@ class SageMakerHook(AwsBaseHook):
# the container starts logging, so ignore any errors thrown about that
pass
- if len(stream_names) > 0:
+ if stream_names:
for idx, event in self.multi_stream_iter(log_group, stream_names, positions):
self.log.info(event["message"])
ts, count = positions[stream_names[idx]]
diff --git a/airflow/providers/amazon/aws/operators/sagemaker.py
b/airflow/providers/amazon/aws/operators/sagemaker.py
index 1547d2203c..514ae6a921 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker.py
@@ -1632,7 +1632,7 @@ class SageMakerCreateNotebookOperator(BaseOperator):
"DirectInternetAccess": self.direct_internet_access,
"RootAccess": self.root_access,
}
- if len(self.create_instance_kwargs) > 0:
+ if self.create_instance_kwargs:
create_notebook_instance_kwargs.update(self.create_instance_kwargs)
self.log.info("Creating SageMaker notebook %s.", self.instance_name)
diff --git a/airflow/providers/amazon/aws/sensors/batch.py
b/airflow/providers/amazon/aws/sensors/batch.py
index e493343c73..c193a747d1 100644
--- a/airflow/providers/amazon/aws/sensors/batch.py
+++ b/airflow/providers/amazon/aws/sensors/batch.py
@@ -176,7 +176,7 @@ class BatchComputeEnvironmentSensor(BaseSensorOperator):
computeEnvironments=[self.compute_environment]
)
- if len(response["computeEnvironments"]) == 0:
+ if not response["computeEnvironments"]:
raise AirflowException(f"AWS Batch compute environment {self.compute_environment} not found")
status = response["computeEnvironments"][0]["status"]
@@ -241,7 +241,7 @@ class BatchJobQueueSensor(BaseSensorOperator):
jobQueues=[self.job_queue]
)
- if len(response["jobQueues"]) == 0:
+ if not response["jobQueues"]:
if self.treat_non_existing_as_deleted:
return True
else:
diff --git a/airflow/providers/amazon/aws/sensors/s3.py
b/airflow/providers/amazon/aws/sensors/s3.py
index 37f18c7621..704fcd95d9 100644
--- a/airflow/providers/amazon/aws/sensors/s3.py
+++ b/airflow/providers/amazon/aws/sensors/s3.py
@@ -116,7 +116,7 @@ class S3KeySensor(BaseSensorOperator):
prefix = re.split(r"[\[\*\?]", key, 1)[0]
keys = self.hook.get_file_metadata(prefix, bucket_name)
key_matches = [k for k in keys if fnmatch.fnmatch(k["Key"], key)]
- if len(key_matches) == 0:
+ if not key_matches:
return False
# Reduce the set of metadata to size only
diff --git a/airflow/providers/amazon/aws/sensors/sqs.py
b/airflow/providers/amazon/aws/sensors/sqs.py
index 705f100fb1..c4bf2621f3 100644
--- a/airflow/providers/amazon/aws/sensors/sqs.py
+++ b/airflow/providers/amazon/aws/sensors/sqs.py
@@ -186,7 +186,7 @@ class SqsSensor(BaseSensorOperator):
self.message_filtering_config,
)
- if not len(messages):
+ if not messages:
continue
message_batch.extend(messages)
@@ -203,7 +203,7 @@ class SqsSensor(BaseSensorOperator):
if "Successful" not in response:
raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
- if not len(message_batch):
+ if not message_batch:
return False
context["ti"].xcom_push(key="messages", value=message_batch)