This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 85acbb4ae9 Refactor: Remove useless str() calls (#33629)
85acbb4ae9 is described below
commit 85acbb4ae9bc26248ca624fa4d289feccba00836
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Thu Aug 24 09:49:36 2023 +0000
Refactor: Remove useless str() calls (#33629)
Co-authored-by: Tzu-ping Chung <[email protected]>
---
airflow/api/common/experimental/get_code.py | 2 +-
.../managers/fab/security_manager/modules/db.py | 36 +++++++++++-----------
airflow/cli/commands/task_command.py | 2 +-
airflow/example_dags/example_bash_operator.py | 2 +-
.../example_short_circuit_decorator.py | 4 +--
.../example_dags/example_short_circuit_operator.py | 4 +--
airflow/executors/base_executor.py | 2 +-
airflow/executors/debug_executor.py | 2 +-
airflow/executors/local_executor.py | 2 +-
airflow/executors/sequential_executor.py | 2 +-
airflow/jobs/job.py | 2 +-
airflow/jobs/local_task_job_runner.py | 2 +-
airflow/jobs/scheduler_job_runner.py | 2 +-
airflow/policies.py | 2 +-
airflow/providers/amazon/aws/hooks/dynamodb.py | 2 +-
.../amazon/aws/log/cloudwatch_task_handler.py | 2 +-
airflow/providers/amazon/aws/sensors/sqs.py | 4 +--
airflow/providers/amazon/aws/triggers/sqs.py | 4 +--
airflow/providers/apache/livy/triggers/livy.py | 2 +-
airflow/providers/arangodb/hooks/arangodb.py | 2 +-
.../kubernetes/executors/kubernetes_executor.py | 2 +-
airflow/providers/dbt/cloud/hooks/dbt.py | 8 ++---
airflow/providers/ftp/sensors/ftp.py | 4 +--
airflow/providers/github/operators/github.py | 4 +--
airflow/providers/github/sensors/github.py | 4 +--
.../providers/google/cloud/log/gcs_task_handler.py | 2 +-
.../providers/google/cloud/operators/mlengine.py | 4 +--
.../google/cloud/transfers/cassandra_to_gcs.py | 6 ++--
.../google/cloud/transfers/facebook_ads_to_gcs.py | 4 +--
.../google/cloud/triggers/bigquery_dts.py | 2 +-
airflow/providers/grpc/hooks/grpc.py | 2 +-
airflow/providers/microsoft/azure/hooks/synapse.py | 2 +-
.../microsoft/azure/triggers/data_factory.py | 2 +-
.../providers/microsoft/winrm/operators/winrm.py | 2 +-
airflow/providers/sftp/operators/sftp.py | 2 +-
airflow/providers/sftp/sensors/sftp.py | 2 +-
airflow/sensors/filesystem.py | 2 +-
airflow/triggers/file.py | 2 +-
airflow/utils/log/file_task_handler.py | 2 +-
airflow/www/views.py | 2 +-
.../endpoints/test_connection_endpoint.py | 2 +-
.../endpoints/test_dag_run_endpoint.py | 16 +++++-----
.../endpoints/test_event_log_endpoint.py | 2 +-
tests/dags/test_example_bash_operator.py | 2 +-
tests/dags/test_miscellaneous.py | 2 +-
tests/models/test_cleartasks.py | 4 +--
tests/providers/amazon/aws/utils/eks_test_utils.py | 6 ++--
.../cncf/kubernetes/models/test_secret.py | 4 +--
tests/providers/microsoft/azure/hooks/test_asb.py | 2 +-
tests/system/providers/github/example_github.py | 4 +--
.../microsoft/azure/example_azure_service_bus.py | 2 +-
51 files changed, 91 insertions(+), 97 deletions(-)
diff --git a/airflow/api/common/experimental/get_code.py
b/airflow/api/common/experimental/get_code.py
index 9e2e8c08a4..b74ed036cc 100644
--- a/airflow/api/common/experimental/get_code.py
+++ b/airflow/api/common/experimental/get_code.py
@@ -37,5 +37,5 @@ def get_code(dag_id: str) -> str:
try:
return DagCode.get_code_by_fileloc(dag.fileloc)
except (OSError, DagCodeNotFound) as exception:
- error_message = f"Error {str(exception)} while reading Dag id {dag_id} Code"
+ error_message = f"Error {exception} while reading Dag id {dag_id} Code"
raise AirflowException(error_message, exception)
diff --git a/airflow/auth/managers/fab/security_manager/modules/db.py
b/airflow/auth/managers/fab/security_manager/modules/db.py
index c7a1556acd..c9b7b34d5e 100644
--- a/airflow/auth/managers/fab/security_manager/modules/db.py
+++ b/airflow/auth/managers/fab/security_manager/modules/db.py
@@ -86,7 +86,7 @@ class FabAirflowSecurityManagerOverrideDb:
if self.count_users() == 0 and self.auth_role_public !=
self.auth_role_admin:
log.warning(const.LOGMSG_WAR_SEC_NO_USER)
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_CREATE_DB.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_CREATE_DB.format(e))
exit(1)
"""
@@ -106,7 +106,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
log.info(const.LOGMSG_INF_SEC_UPD_ROLE.format(role))
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_UPD_ROLE.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_UPD_ROLE.format(e))
self.get_session.rollback()
return None
return role
@@ -123,7 +123,7 @@ class FabAirflowSecurityManagerOverrideDb:
log.info(const.LOGMSG_INF_SEC_ADD_ROLE.format(name))
return role
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_ADD_ROLE.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_ADD_ROLE.format(e))
self.get_session.rollback()
return role
@@ -190,7 +190,7 @@ class FabAirflowSecurityManagerOverrideDb:
log.info(const.LOGMSG_INF_SEC_ADD_USER.format(username))
return user
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_ADD_USER.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_ADD_USER.format(e))
self.get_session.rollback()
return False
@@ -226,7 +226,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
return register_user
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_ADD_REGISTER_USER.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_ADD_REGISTER_USER.format(e))
self.get_session.rollback()
return None
@@ -269,7 +269,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
log.info(const.LOGMSG_INF_SEC_UPD_USER.format(user))
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_UPD_USER.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_UPD_USER.format(e))
self.get_session.rollback()
return False
@@ -284,7 +284,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
return True
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_DEL_REGISTER_USER.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_DEL_REGISTER_USER.format(e))
self.get_session.rollback()
return False
@@ -322,7 +322,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
return action
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_ADD_PERMISSION.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_ADD_PERMISSION.format(e))
self.get_session.rollback()
return action
@@ -349,7 +349,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
return True
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_DEL_PERMISSION.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_DEL_PERMISSION.format(e))
self.get_session.rollback()
return False
@@ -383,7 +383,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
return resource
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_ADD_VIEWMENU.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_ADD_VIEWMENU.format(e))
self.get_session.rollback()
return resource
@@ -419,7 +419,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.get_session.commit()
return True
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_DEL_PERMISSION.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_DEL_PERMISSION.format(e))
self.get_session.rollback()
return False
@@ -481,10 +481,10 @@ class FabAirflowSecurityManagerOverrideDb:
try:
self.get_session.add(perm)
self.get_session.commit()
- log.info(const.LOGMSG_INF_SEC_ADD_PERMVIEW.format(str(perm)))
+ log.info(const.LOGMSG_INF_SEC_ADD_PERMVIEW.format(perm))
return perm
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_ADD_PERMVIEW.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_ADD_PERMVIEW.format(e))
self.get_session.rollback()
return None
@@ -518,7 +518,7 @@ class FabAirflowSecurityManagerOverrideDb:
self.delete_action(perm.action.name)
log.info(const.LOGMSG_INF_SEC_DEL_PERMVIEW.format(action_name,
resource_name))
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_DEL_PERMVIEW.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_DEL_PERMVIEW.format(e))
self.get_session.rollback()
def add_permission_to_role(self, role: Role, permission: Permission |
None) -> None:
@@ -534,9 +534,9 @@ class FabAirflowSecurityManagerOverrideDb:
role.permissions.append(permission)
self.get_session.merge(role)
self.get_session.commit()
- log.info(const.LOGMSG_INF_SEC_ADD_PERMROLE.format(str(permission), role.name))
+ log.info(const.LOGMSG_INF_SEC_ADD_PERMROLE.format(permission, role.name))
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_ADD_PERMROLE.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_ADD_PERMROLE.format(e))
self.get_session.rollback()
def remove_permission_from_role(self, role: Role, permission: Permission)
-> None:
@@ -551,7 +551,7 @@ class FabAirflowSecurityManagerOverrideDb:
role.permissions.remove(permission)
self.get_session.merge(role)
self.get_session.commit()
- log.info(const.LOGMSG_INF_SEC_DEL_PERMROLE.format(str(permission), role.name))
+ log.info(const.LOGMSG_INF_SEC_DEL_PERMROLE.format(permission, role.name))
except Exception as e:
- log.error(const.LOGMSG_ERR_SEC_DEL_PERMROLE.format(str(e)))
+ log.error(const.LOGMSG_ERR_SEC_DEL_PERMROLE.format(e))
self.get_session.rollback()
diff --git a/airflow/cli/commands/task_command.py
b/airflow/cli/commands/task_command.py
index 205d87b2f0..97b290f078 100644
--- a/airflow/cli/commands/task_command.py
+++ b/airflow/cli/commands/task_command.py
@@ -548,7 +548,7 @@ def task_states_for_dag_run(args, session: Session =
NEW_SESSION) -> None:
select(DagRun).where(DagRun.execution_date == execution_date,
DagRun.dag_id == args.dag_id)
)
except (ParserError, TypeError) as err:
- raise AirflowException(f"Error parsing the supplied execution_date. Error: {str(err)}")
+ raise AirflowException(f"Error parsing the supplied execution_date. Error: {err}")
if dag_run is None:
raise DagRunNotFound(
diff --git a/airflow/example_dags/example_bash_operator.py
b/airflow/example_dags/example_bash_operator.py
index 2f0343e158..a855ce9859 100644
--- a/airflow/example_dags/example_bash_operator.py
+++ b/airflow/example_dags/example_bash_operator.py
@@ -50,7 +50,7 @@ with DAG(
for i in range(3):
task = BashOperator(
- task_id="runme_" + str(i),
+ task_id=f"runme_{i}",
bash_command='echo "{{ task_instance_key_str }}" && sleep 1',
)
task >> run_this
diff --git a/airflow/example_dags/example_short_circuit_decorator.py
b/airflow/example_dags/example_short_circuit_decorator.py
index 30f6cd0e01..79a8c6904b 100644
--- a/airflow/example_dags/example_short_circuit_decorator.py
+++ b/airflow/example_dags/example_short_circuit_decorator.py
@@ -32,8 +32,8 @@ def example_short_circuit_decorator():
def check_condition(condition):
return condition
- ds_true = [EmptyOperator(task_id="true_" + str(i)) for i in [1, 2]]
- ds_false = [EmptyOperator(task_id="false_" + str(i)) for i in [1, 2]]
+ ds_true = [EmptyOperator(task_id=f"true_{i}") for i in [1, 2]]
+ ds_false = [EmptyOperator(task_id=f"false_{i}") for i in [1, 2]]
condition_is_true =
check_condition.override(task_id="condition_is_true")(condition=True)
condition_is_false =
check_condition.override(task_id="condition_is_false")(condition=False)
diff --git a/airflow/example_dags/example_short_circuit_operator.py
b/airflow/example_dags/example_short_circuit_operator.py
index 77f976c502..5a35f0b49b 100644
--- a/airflow/example_dags/example_short_circuit_operator.py
+++ b/airflow/example_dags/example_short_circuit_operator.py
@@ -42,8 +42,8 @@ with DAG(
python_callable=lambda: False,
)
- ds_true = [EmptyOperator(task_id="true_" + str(i)) for i in [1, 2]]
- ds_false = [EmptyOperator(task_id="false_" + str(i)) for i in [1, 2]]
+ ds_true = [EmptyOperator(task_id=f"true_{i}") for i in [1, 2]]
+ ds_false = [EmptyOperator(task_id=f"false_{i}") for i in [1, 2]]
chain(cond_true, *ds_true)
chain(cond_false, *ds_false)
diff --git a/airflow/executors/base_executor.py
b/airflow/executors/base_executor.py
index 7ae175540f..909a955cbf 100644
--- a/airflow/executors/base_executor.py
+++ b/airflow/executors/base_executor.py
@@ -308,7 +308,7 @@ class BaseExecutor(LoggingMixin):
try:
self.running.remove(key)
except KeyError:
- self.log.debug("Could not find key: %s", str(key))
+ self.log.debug("Could not find key: %s", key)
self.event_buffer[key] = state, info
def fail(self, key: TaskInstanceKey, info=None) -> None:
diff --git a/airflow/executors/debug_executor.py
b/airflow/executors/debug_executor.py
index 8a46d6cda0..4ecebdff8b 100644
--- a/airflow/executors/debug_executor.py
+++ b/airflow/executors/debug_executor.py
@@ -92,7 +92,7 @@ class DebugExecutor(BaseExecutor):
except Exception as e:
ti.set_state(TaskInstanceState.FAILED)
self.change_state(key, TaskInstanceState.FAILED)
- self.log.exception("Failed to execute task: %s.", str(e))
+ self.log.exception("Failed to execute task: %s.", e)
return False
def queue_task_instance(
diff --git a/airflow/executors/local_executor.py
b/airflow/executors/local_executor.py
index cf88ca13b2..9dcfdb629f 100644
--- a/airflow/executors/local_executor.py
+++ b/airflow/executors/local_executor.py
@@ -99,7 +99,7 @@ class LocalWorkerBase(Process, LoggingMixin):
subprocess.check_call(command, close_fds=True)
return TaskInstanceState.SUCCESS
except subprocess.CalledProcessError as e:
- self.log.error("Failed to execute task %s.", str(e))
+ self.log.error("Failed to execute task %s.", e)
return TaskInstanceState.FAILED
def _execute_work_in_fork(self, command: CommandType) -> TaskInstanceState:
diff --git a/airflow/executors/sequential_executor.py
b/airflow/executors/sequential_executor.py
index 2715edad6e..8ea3e42dc5 100644
--- a/airflow/executors/sequential_executor.py
+++ b/airflow/executors/sequential_executor.py
@@ -78,7 +78,7 @@ class SequentialExecutor(BaseExecutor):
self.change_state(key, TaskInstanceState.SUCCESS)
except subprocess.CalledProcessError as e:
self.change_state(key, TaskInstanceState.FAILED)
- self.log.error("Failed to execute task %s.", str(e))
+ self.log.error("Failed to execute task %s.", e)
self.commands_to_run = []
diff --git a/airflow/jobs/job.py b/airflow/jobs/job.py
index f20808868d..159b8f934a 100644
--- a/airflow/jobs/job.py
+++ b/airflow/jobs/job.py
@@ -153,7 +153,7 @@ class Job(Base, LoggingMixin):
try:
self.on_kill()
except Exception as e:
- self.log.error("on_kill() method failed: %s", str(e))
+ self.log.error("on_kill() method failed: %s", e)
session.merge(job)
session.commit()
raise AirflowException("Job shut down externally.")
diff --git a/airflow/jobs/local_task_job_runner.py
b/airflow/jobs/local_task_job_runner.py
index d04af55dce..f20a894361 100644
--- a/airflow/jobs/local_task_job_runner.py
+++ b/airflow/jobs/local_task_job_runner.py
@@ -285,7 +285,7 @@ class LocalTaskJobRunner(BaseJobRunner["Job |
JobPydantic"], LoggingMixin):
else:
dagrun_timeout = None
if dagrun_timeout and execution_time > dagrun_timeout:
- self.log.warning("DagRun timed out after %s.",
str(execution_time))
+ self.log.warning("DagRun timed out after %s.",
execution_time)
# potential race condition, the _run_raw_task commits `success` or
other state
# but task_runner does not exit right away due to slow process
shutdown or any other reasons
diff --git a/airflow/jobs/scheduler_job_runner.py
b/airflow/jobs/scheduler_job_runner.py
index 50f78a48a9..5bd9f816a3 100644
--- a/airflow/jobs/scheduler_job_runner.py
+++ b/airflow/jobs/scheduler_job_runner.py
@@ -899,7 +899,7 @@ class SchedulerJobRunner(BaseJobRunner[Job], LoggingMixin):
if callback_to_run:
self._send_dag_callbacks_to_processor(dag, callback_to_run)
except Exception as e: # should not fail the scheduler
- self.log.exception("Failed to update dag run state for paused dags
due to %s", str(e))
+ self.log.exception("Failed to update dag run state for paused dags
due to %s", e)
def _run_scheduler_loop(self) -> None:
"""
diff --git a/airflow/policies.py b/airflow/policies.py
index f37703fe2a..6b40b5ac5f 100644
--- a/airflow/policies.py
+++ b/airflow/policies.py
@@ -171,7 +171,7 @@ def make_plugin_from_local_settings(pm:
pluggy.PluginManager, module, names: set
#
codestr = textwrap.dedent(
f"""
- def {name}_name_mismatch_shim{str(desired_sig)}:
+ def {name}_name_mismatch_shim{desired_sig}:
return __target({' ,'.join(desired_sig.parameters)})
"""
)
diff --git a/airflow/providers/amazon/aws/hooks/dynamodb.py
b/airflow/providers/amazon/aws/hooks/dynamodb.py
index db38b401af..957730a754 100644
--- a/airflow/providers/amazon/aws/hooks/dynamodb.py
+++ b/airflow/providers/amazon/aws/hooks/dynamodb.py
@@ -68,4 +68,4 @@ class DynamoDBHook(AwsBaseHook):
batch.put_item(Item=item)
return True
except Exception as general_error:
- raise AirflowException(f"Failed to insert items in dynamodb,
error: {str(general_error)}")
+ raise AirflowException(f"Failed to insert items in dynamodb,
error: {general_error}")
diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
index 0cc9c43b05..374eb29b2f 100644
--- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
+++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -99,7 +99,7 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
except Exception as e:
log = (
f"*** Unable to read remote logs from Cloudwatch (log_group:
{self.log_group}, log_stream: "
- f"{stream_name})\n*** {str(e)}\n\n"
+ f"{stream_name})\n*** {e}\n\n"
)
self.log.error(log)
local_log, metadata = super()._read(task_instance, try_number,
metadata)
diff --git a/airflow/providers/amazon/aws/sensors/sqs.py
b/airflow/providers/amazon/aws/sensors/sqs.py
index a61652acb2..705f100fb1 100644
--- a/airflow/providers/amazon/aws/sensors/sqs.py
+++ b/airflow/providers/amazon/aws/sensors/sqs.py
@@ -202,9 +202,7 @@ class SqsSensor(BaseSensorOperator):
response =
self.hook.conn.delete_message_batch(QueueUrl=self.sqs_queue, Entries=entries)
if "Successful" not in response:
- raise AirflowException(
- "Delete SQS Messages failed " + str(response) + " for messages " + str(messages)
- )
+ raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
if not len(message_batch):
return False
diff --git a/airflow/providers/amazon/aws/triggers/sqs.py
b/airflow/providers/amazon/aws/triggers/sqs.py
index 7e26b9c28f..68f85c5f53 100644
--- a/airflow/providers/amazon/aws/triggers/sqs.py
+++ b/airflow/providers/amazon/aws/triggers/sqs.py
@@ -149,9 +149,7 @@ class SqsSensorTrigger(BaseTrigger):
response = await
client.delete_message_batch(QueueUrl=self.sqs_queue, Entries=entries)
if "Successful" not in response:
- raise AirflowException(
- f"Delete SQS Messages failed {str(response)} for messages {str(messages)}"
- )
+ raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
return message_batch
diff --git a/airflow/providers/apache/livy/triggers/livy.py
b/airflow/providers/apache/livy/triggers/livy.py
index 95ccc8577b..30d6e393f1 100644
--- a/airflow/providers/apache/livy/triggers/livy.py
+++ b/airflow/providers/apache/livy/triggers/livy.py
@@ -101,7 +101,7 @@ class LivyTrigger(BaseTrigger):
{
"status": "error",
"batch_id": self._batch_id,
- "response": f"Batch {self._batch_id} did not succeed with
{str(exc)}",
+ "response": f"Batch {self._batch_id} did not succeed with
{exc}",
"log_lines": None,
}
)
diff --git a/airflow/providers/arangodb/hooks/arangodb.py
b/airflow/providers/arangodb/hooks/arangodb.py
index 07363e00f5..f8f4d90072 100644
--- a/airflow/providers/arangodb/hooks/arangodb.py
+++ b/airflow/providers/arangodb/hooks/arangodb.py
@@ -108,7 +108,7 @@ class ArangoDBHook(BaseHook):
f"Failed to execute AQLQuery, error connecting to
database: {self.database}"
)
except AQLQueryExecuteError as error:
- raise AirflowException(f"Failed to execute AQLQuery, error:
{str(error)}")
+ raise AirflowException(f"Failed to execute AQLQuery, error:
{error}")
def create_collection(self, name):
if not self.db_conn.has_collection(name):
diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
index 5bc39625da..ac11622216 100644
--- a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -516,7 +516,7 @@ class KubernetesExecutor(BaseExecutor):
if log:
messages.append("Found logs through kube API")
except Exception as e:
- messages.append(f"Reading from k8s pod logs failed: {str(e)}")
+ messages.append(f"Reading from k8s pod logs failed: {e}")
return messages, ["\n".join(log)]
def try_adopt_task_instances(self, tis: Sequence[TaskInstance]) ->
Sequence[TaskInstance]:
diff --git a/airflow/providers/dbt/cloud/hooks/dbt.py
b/airflow/providers/dbt/cloud/hooks/dbt.py
index 8c589970ab..4a9785da3e 100644
--- a/airflow/providers/dbt/cloud/hooks/dbt.py
+++ b/airflow/providers/dbt/cloud/hooks/dbt.py
@@ -254,7 +254,7 @@ class DbtCloudHook(HttpHook):
Valid values are "trigger", "job", "repository", and "environment".
"""
try:
- self.log.info("Getting the status of job run %s.", str(run_id))
+ self.log.info("Getting the status of job run %s.", run_id)
response = await self.get_job_details(
run_id, account_id=account_id, include_related=include_related
)
@@ -490,14 +490,12 @@ class DbtCloudHook(HttpHook):
:param account_id: Optional. The ID of a dbt Cloud account.
:return: The status of a dbt Cloud job run.
"""
- self.log.info("Getting the status of job run %s.", str(run_id))
+ self.log.info("Getting the status of job run %s.", run_id)
job_run = self.get_job_run(account_id=account_id, run_id=run_id)
job_run_status = job_run.json()["data"]["status"]
- self.log.info(
- "Current status of job run %s: %s", str(run_id), DbtCloudJobRunStatus(job_run_status).name
- )
+ self.log.info("Current status of job run %s: %s", run_id, DbtCloudJobRunStatus(job_run_status).name)
return job_run_status
diff --git a/airflow/providers/ftp/sensors/ftp.py
b/airflow/providers/ftp/sensors/ftp.py
index 92710aeb21..4ec7cf7516 100644
--- a/airflow/providers/ftp/sensors/ftp.py
+++ b/airflow/providers/ftp/sensors/ftp.py
@@ -73,10 +73,10 @@ class FTPSensor(BaseSensorOperator):
self.log.info("Poking for %s", self.path)
try:
mod_time = hook.get_mod_time(self.path)
- self.log.info("Found File %s last modified: %s",
str(self.path), str(mod_time))
+ self.log.info("Found File %s last modified: %s", self.path,
mod_time)
except ftplib.error_perm as e:
- self.log.error("Ftp error encountered: %s", str(e))
+ self.log.error("Ftp error encountered: %s", e)
error_code = self._get_error_code(e)
if (error_code != 550) and (
self.fail_on_transient_errors or (error_code not in
self.transient_errors)
diff --git a/airflow/providers/github/operators/github.py
b/airflow/providers/github/operators/github.py
index 82c9ab3b77..e7e305808f 100644
--- a/airflow/providers/github/operators/github.py
+++ b/airflow/providers/github/operators/github.py
@@ -74,6 +74,6 @@ class GithubOperator(BaseOperator):
return github_result
except GithubException as github_error:
- raise AirflowException(f"Failed to execute GithubOperator, error:
{str(github_error)}")
+ raise AirflowException(f"Failed to execute GithubOperator, error:
{github_error}")
except Exception as e:
- raise AirflowException(f"GitHub operator error: {str(e)}")
+ raise AirflowException(f"GitHub operator error: {e}")
diff --git a/airflow/providers/github/sensors/github.py
b/airflow/providers/github/sensors/github.py
index eb9caad5ae..db943cd0ea 100644
--- a/airflow/providers/github/sensors/github.py
+++ b/airflow/providers/github/sensors/github.py
@@ -136,9 +136,9 @@ class GithubTagSensor(BaseGithubRepositorySensor):
result = self.tag_name in all_tags
except GithubException as github_error: # type: ignore[misc]
- raise AirflowException(f"Failed to execute GithubSensor, error:
{str(github_error)}")
+ raise AirflowException(f"Failed to execute GithubSensor, error:
{github_error}")
except Exception as e:
- raise AirflowException(f"GitHub operator error: {str(e)}")
+ raise AirflowException(f"GitHub operator error: {e}")
if result is True:
self.log.info("Tag %s exists in %s repository, Success.",
self.tag_name, self.repository_name)
diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py
b/airflow/providers/google/cloud/log/gcs_task_handler.py
index 121a668857..79055f0847 100644
--- a/airflow/providers/google/cloud/log/gcs_task_handler.py
+++ b/airflow/providers/google/cloud/log/gcs_task_handler.py
@@ -247,7 +247,7 @@ class GCSTaskHandler(FileTaskHandler, LoggingMixin):
pass
else:
log += self._add_message(
- f"Error checking for previous log; if exists, may be
overwritten: {str(e)}"
+ f"Error checking for previous log; if exists, may be
overwritten: {e}"
)
self.log.warning("Error checking for previous log: %s", e)
try:
diff --git a/airflow/providers/google/cloud/operators/mlengine.py
b/airflow/providers/google/cloud/operators/mlengine.py
index b391041372..30d1225c05 100644
--- a/airflow/providers/google/cloud/operators/mlengine.py
+++ b/airflow/providers/google/cloud/operators/mlengine.py
@@ -278,7 +278,7 @@ class
MLEngineStartBatchPredictionJobOperator(GoogleCloudBaseOperator):
)
if finished_prediction_job["state"] != "SUCCEEDED":
- self.log.error("MLEngine batch prediction job failed: %s",
str(finished_prediction_job))
+ self.log.error("MLEngine batch prediction job failed: %s",
finished_prediction_job)
raise RuntimeError(finished_prediction_job["errorMessage"])
return finished_prediction_job["predictionOutput"]
@@ -1153,7 +1153,7 @@ class
MLEngineStartTrainingJobOperator(GoogleCloudBaseOperator):
def _handle_job_error(self, finished_training_job) -> None:
if finished_training_job["state"] != "SUCCEEDED":
- self.log.error("MLEngine training job failed: %s",
str(finished_training_job))
+ self.log.error("MLEngine training job failed: %s",
finished_training_job)
raise RuntimeError(finished_training_job["errorMessage"])
def execute(self, context: Context):
diff --git a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
index 408977d7cc..620edfbe15 100644
--- a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
@@ -284,7 +284,7 @@ class CassandraToGCSOperator(BaseOperator):
elif isinstance(value, OrderedMapSerializedKey):
return self.convert_map_type(value)
else:
- raise AirflowException("Unexpected value: " + str(value))
+ raise AirflowException(f"Unexpected value: {value}")
def convert_array_types(self, value: list[Any] | SortedSet) -> list[Any]:
"""Maps convert_value over array."""
@@ -308,7 +308,7 @@ class CassandraToGCSOperator(BaseOperator):
will be named 'field_<index>', where index is determined by the order
of the tuple elements defined in cassandra.
"""
- names = ["field_" + str(i) for i in range(len(values))]
+ names = [f"field_{i}" for i in range(len(values))]
return self.generate_data_dict(names, values)
def convert_map_type(self, value: OrderedMapSerializedKey) ->
list[dict[str, Any]]:
@@ -351,7 +351,7 @@ class CassandraToGCSOperator(BaseOperator):
types = type_.subtypes
if types and not names and type_.cassname == "TupleType":
- names = ["field_" + str(i) for i in range(len(types))]
+ names = [f"field_{i}" for i in range(len(types))]
elif types and not names and type_.cassname == "MapType":
names = ["key", "value"]
diff --git a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
index 30e7f61860..9273553e1b 100644
--- a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
@@ -141,11 +141,11 @@ class FacebookAdsReportToGcsOperator(BaseOperator):
account_id=account_id,
)
else:
- self.log.warning("account_id: %s returned empty report",
str(account_id))
+ self.log.warning("account_id: %s returned empty report",
account_id)
else:
message = (
"Facebook Ads Hook returned different type than expected.
Expected return types should be "
- "List or Dict. Actual return type of the Hook: " +
str(type(bulk_report))
+ f"List or Dict. Actual return type of the Hook:
{type(bulk_report)}"
)
raise AirflowException(message)
total_row_count =
self._decide_and_flush(converted_rows_with_action=converted_rows_with_action)
diff --git a/airflow/providers/google/cloud/triggers/bigquery_dts.py
b/airflow/providers/google/cloud/triggers/bigquery_dts.py
index 3a5ab2267f..d5a920a762 100644
--- a/airflow/providers/google/cloud/triggers/bigquery_dts.py
+++ b/airflow/providers/google/cloud/triggers/bigquery_dts.py
@@ -133,7 +133,7 @@ class BigQueryDataTransferRunTrigger(BaseTrigger):
yield TriggerEvent(
{
"status": "failed",
- "message": f"Trigger failed with exception: {str(e)}",
+ "message": f"Trigger failed with exception: {e}",
}
)
return
diff --git a/airflow/providers/grpc/hooks/grpc.py
b/airflow/providers/grpc/hooks/grpc.py
index c42262f914..b7b0344a4f 100644
--- a/airflow/providers/grpc/hooks/grpc.py
+++ b/airflow/providers/grpc/hooks/grpc.py
@@ -112,7 +112,7 @@ class GrpcHook(BaseHook):
else:
raise AirflowConfigException(
"auth_type not supported or not provided, channel cannot be
established, "
- f"given value: {str(auth_type)}"
+ f"given value: {auth_type}"
)
if self.interceptors:
diff --git a/airflow/providers/microsoft/azure/hooks/synapse.py
b/airflow/providers/microsoft/azure/hooks/synapse.py
index 881f918b60..84475a2f38 100644
--- a/airflow/providers/microsoft/azure/hooks/synapse.py
+++ b/airflow/providers/microsoft/azure/hooks/synapse.py
@@ -183,7 +183,7 @@ class AzureSynapseHook(BaseHook):
)
# Wait to check the status of the job run based on the
``check_interval`` configured.
- self.log.info("Sleeping for %s seconds", str(check_interval))
+ self.log.info("Sleeping for %s seconds", check_interval)
time.sleep(check_interval)
job_run_status = self.get_job_run_status()
diff --git a/airflow/providers/microsoft/azure/triggers/data_factory.py
b/airflow/providers/microsoft/azure/triggers/data_factory.py
index e3dd38ad66..1ce5484008 100644
--- a/airflow/providers/microsoft/azure/triggers/data_factory.py
+++ b/airflow/providers/microsoft/azure/triggers/data_factory.py
@@ -233,7 +233,7 @@ class AzureDataFactoryTrigger(BaseTrigger):
resource_group_name=self.resource_group_name,
factory_name=self.factory_name,
)
- self.log.info("Unexpected error %s caught. Cancel pipeline
run %s", str(e), self.run_id)
+ self.log.info("Unexpected error %s caught. Cancel pipeline
run %s", e, self.run_id)
except Exception as err:
yield TriggerEvent({"status": "error", "message":
str(err), "run_id": self.run_id})
yield TriggerEvent({"status": "error", "message": str(e),
"run_id": self.run_id})
diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py
b/airflow/providers/microsoft/winrm/operators/winrm.py
index 7aef926173..b8562fd588 100644
--- a/airflow/providers/microsoft/winrm/operators/winrm.py
+++ b/airflow/providers/microsoft/winrm/operators/winrm.py
@@ -141,7 +141,7 @@ class WinRMOperator(BaseOperator):
self.winrm_hook.winrm_protocol.close_shell(winrm_client) # type:
ignore[attr-defined]
except Exception as e:
- raise AirflowException(f"WinRM operator error: {str(e)}")
+ raise AirflowException(f"WinRM operator error: {e}")
if return_code == 0:
# returning output if do_xcom_push is set
diff --git a/airflow/providers/sftp/operators/sftp.py
b/airflow/providers/sftp/operators/sftp.py
index 95b3a8eeb9..949654cc46 100644
--- a/airflow/providers/sftp/operators/sftp.py
+++ b/airflow/providers/sftp/operators/sftp.py
@@ -188,7 +188,7 @@ class SFTPOperator(BaseOperator):
self.sftp_hook.store_file(_remote_filepath,
_local_filepath, confirm=self.confirm)
except Exception as e:
- raise AirflowException(f"Error while transferring {file_msg},
error: {str(e)}")
+ raise AirflowException(f"Error while transferring {file_msg},
error: {e}")
return self.local_filepath
diff --git a/airflow/providers/sftp/sensors/sftp.py
b/airflow/providers/sftp/sensors/sftp.py
index 82baab2bce..f6a1278c18 100644
--- a/airflow/providers/sftp/sensors/sftp.py
+++ b/airflow/providers/sftp/sensors/sftp.py
@@ -87,7 +87,7 @@ class SFTPSensor(BaseSensorOperator):
for actual_file_to_check in actual_files_to_check:
try:
mod_time = self.hook.get_mod_time(actual_file_to_check)
- self.log.info("Found File %s last modified: %s",
str(actual_file_to_check), str(mod_time))
+ self.log.info("Found File %s last modified: %s",
actual_file_to_check, mod_time)
except OSError as e:
if e.errno != SFTP_NO_SUCH_FILE:
raise e
diff --git a/airflow/sensors/filesystem.py b/airflow/sensors/filesystem.py
index 3eba10380f..d2d006818f 100644
--- a/airflow/sensors/filesystem.py
+++ b/airflow/sensors/filesystem.py
@@ -66,7 +66,7 @@ class FileSensor(BaseSensorOperator):
for path in glob(full_path, recursive=self.recursive):
if os.path.isfile(path):
mod_time =
datetime.datetime.fromtimestamp(os.path.getmtime(path)).strftime("%Y%m%d%H%M%S")
- self.log.info("Found File %s last modified: %s", str(path),
mod_time)
+ self.log.info("Found File %s last modified: %s", path,
mod_time)
return True
for _, _, files in os.walk(path):
diff --git a/airflow/triggers/file.py b/airflow/triggers/file.py
index 85a5a373ba..93880407e5 100644
--- a/airflow/triggers/file.py
+++ b/airflow/triggers/file.py
@@ -65,7 +65,7 @@ class FileTrigger(BaseTrigger):
if os.path.isfile(path):
mod_time_f = os.path.getmtime(path)
mod_time =
datetime.datetime.fromtimestamp(mod_time_f).strftime("%Y%m%d%H%M%S")
- self.log.info("Found File %s last modified: %s",
str(path), str(mod_time))
+ self.log.info("Found File %s last modified: %s", path,
mod_time)
yield TriggerEvent(True)
for _, _, files in os.walk(self.filepath):
if files:
diff --git a/airflow/utils/log/file_task_handler.py
b/airflow/utils/log/file_task_handler.py
index 1314cd7ff9..2530b8fdb5 100644
--- a/airflow/utils/log/file_task_handler.py
+++ b/airflow/utils/log/file_task_handler.py
@@ -520,7 +520,7 @@ class FileTaskHandler(logging.Handler):
messages.append(f"Found logs served from host {url}")
logs.append(response.text)
except Exception as e:
- messages.append(f"Could not read served logs: {str(e)}")
+ messages.append(f"Could not read served logs: {e}")
logger.exception("Could not read served logs")
return messages, logs
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 0e55ed60ab..275432f9bc 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -1694,7 +1694,7 @@ class Airflow(AirflowBaseView):
headers={"Content-Disposition": f"attachment;
filename={attachment_filename}"},
)
except AttributeError as e:
- error_messages = [f"Task log handler does not support read
logs.\n{str(e)}\n"]
+ error_messages = [f"Task log handler does not support read
logs.\n{e}\n"]
metadata["end_of_log"] = True
return {"message": error_messages, "error": True, "metadata":
metadata}
diff --git a/tests/api_connexion/endpoints/test_connection_endpoint.py
b/tests/api_connexion/endpoints/test_connection_endpoint.py
index de0b7aad97..24180b5052 100644
--- a/tests/api_connexion/endpoints/test_connection_endpoint.py
+++ b/tests/api_connexion/endpoints/test_connection_endpoint.py
@@ -351,7 +351,7 @@ class TestGetConnectionsPagination(TestConnectionEndpoint):
def _create_connections(self, count):
return [
- Connection(conn_id="TEST_CONN_ID" + str(i),
conn_type="TEST_CONN_TYPE" + str(i))
+ Connection(conn_id=f"TEST_CONN_ID{i}",
conn_type=f"TEST_CONN_TYPE{i}")
for i in range(1, count + 1)
]
diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py
b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
index 434511b9e9..e66139a289 100644
--- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py
+++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
@@ -137,7 +137,7 @@ class TestDagRunEndpoint:
dags.append(DagModel(dag_id="TEST_DAG_ID", is_active=True))
dagrun_model = DagRun(
dag_id="TEST_DAG_ID",
- run_id="TEST_DAG_RUN_ID_" + str(i),
+ run_id=f"TEST_DAG_RUN_ID_{i}",
run_type=DagRunType.MANUAL,
execution_date=timezone.parse(self.default_time) +
timedelta(days=i - 1),
start_date=timezone.parse(self.default_time),
@@ -148,11 +148,11 @@ class TestDagRunEndpoint:
if extra_dag:
for i in range(idx_start + 2, idx_start + 4):
- dags.append(DagModel(dag_id="TEST_DAG_ID_" + str(i)))
+ dags.append(DagModel(dag_id=f"TEST_DAG_ID_{i}"))
dag_runs.append(
DagRun(
- dag_id="TEST_DAG_ID_" + str(i),
- run_id="TEST_DAG_RUN_ID_" + str(i),
+ dag_id=f"TEST_DAG_ID_{i}",
+ run_id=f"TEST_DAG_RUN_ID_{i}",
run_type=DagRunType.MANUAL,
execution_date=timezone.parse(self.default_time_2),
start_date=timezone.parse(self.default_time),
@@ -496,7 +496,7 @@ class TestGetDagRunsPagination(TestDagRunEndpoint):
dag_runs = [
DagRun(
dag_id="TEST_DAG_ID",
- run_id="TEST_DAG_RUN_ID" + str(i),
+ run_id=f"TEST_DAG_RUN_ID{i}",
run_type=DagRunType.MANUAL,
execution_date=timezone.parse(self.default_time) +
timedelta(minutes=i),
start_date=timezone.parse(self.default_time),
@@ -581,7 +581,7 @@ class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
return [
DagRun(
dag_id="TEST_DAG_ID",
- run_id="TEST_START_EXEC_DAY_1" + str(i),
+ run_id=f"TEST_START_EXEC_DAY_1{i}",
run_type=DagRunType.MANUAL,
execution_date=timezone.parse(dates[i]),
start_date=timezone.parse(dates[i]),
@@ -872,7 +872,7 @@ class TestGetDagRunBatchPagination(TestDagRunEndpoint):
dag_runs = [
DagRun(
dag_id="TEST_DAG_ID",
- run_id="TEST_DAG_RUN_ID" + str(i),
+ run_id=f"TEST_DAG_RUN_ID{i}",
state="running",
run_type=DagRunType.MANUAL,
execution_date=timezone.parse(self.default_time) +
timedelta(minutes=i),
@@ -956,7 +956,7 @@ class TestGetDagRunBatchDateFilters(TestDagRunEndpoint):
dag_runs = [
DagRun(
dag_id="TEST_DAG_ID",
- run_id="TEST_START_EXEC_DAY_1" + str(i),
+ run_id=f"TEST_START_EXEC_DAY_1{i}",
run_type=DagRunType.MANUAL,
execution_date=timezone.parse(dates[i]),
start_date=timezone.parse(dates[i]),
diff --git a/tests/api_connexion/endpoints/test_event_log_endpoint.py
b/tests/api_connexion/endpoints/test_event_log_endpoint.py
index 18439f8a92..ee1efba55b 100644
--- a/tests/api_connexion/endpoints/test_event_log_endpoint.py
+++ b/tests/api_connexion/endpoints/test_event_log_endpoint.py
@@ -319,4 +319,4 @@ class TestGetEventLogPagination(TestEventLogEndpoint):
assert len(response.json["event_logs"]) == 150
def _create_event_logs(self, task_instance, count):
- return [Log(event="TEST_EVENT_" + str(i), task_instance=task_instance)
for i in range(1, count + 1)]
+ return [Log(event=f"TEST_EVENT_{i}", task_instance=task_instance) for
i in range(1, count + 1)]
diff --git a/tests/dags/test_example_bash_operator.py
b/tests/dags/test_example_bash_operator.py
index 3036487b66..1678799f4b 100644
--- a/tests/dags/test_example_bash_operator.py
+++ b/tests/dags/test_example_bash_operator.py
@@ -38,7 +38,7 @@ run_this.set_downstream(run_this_last)
for i in range(3):
task = BashOperator(
- task_id="runme_" + str(i), bash_command='echo "{{
task_instance_key_str }}" && sleep 1', dag=dag
+ task_id=f"runme_{i}", bash_command='echo "{{ task_instance_key_str }}"
&& sleep 1', dag=dag
)
task.set_downstream(run_this)
diff --git a/tests/dags/test_miscellaneous.py b/tests/dags/test_miscellaneous.py
index 587d33e692..cad59dda1e 100644
--- a/tests/dags/test_miscellaneous.py
+++ b/tests/dags/test_miscellaneous.py
@@ -55,7 +55,7 @@ run_this >> run_this_last
for i in range(3):
task = BashOperator(
- task_id="runme_" + str(i),
+ task_id=f"runme_{i}",
bash_command='echo "{{ task_instance_key_str }}" && sleep 1',
dag=dag,
)
diff --git a/tests/models/test_cleartasks.py b/tests/models/test_cleartasks.py
index ec504ba186..a832d3ab85 100644
--- a/tests/models/test_cleartasks.py
+++ b/tests/models/test_cleartasks.py
@@ -532,11 +532,11 @@ class TestClearTasks:
num_of_dags = 5
for i in range(num_of_dags):
dag = DAG(
- "test_dag_clear_" + str(i),
+ f"test_dag_clear_{i}",
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
- task = EmptyOperator(task_id="test_task_clear_" + str(i),
owner="test", dag=dag)
+ task = EmptyOperator(task_id=f"test_task_clear_{i}", owner="test",
dag=dag)
dr = dag.create_dagrun(
execution_date=DEFAULT_DATE,
diff --git a/tests/providers/amazon/aws/utils/eks_test_utils.py
b/tests/providers/amazon/aws/utils/eks_test_utils.py
index d3f0c715ce..7a403eb855 100644
--- a/tests/providers/amazon/aws/utils/eks_test_utils.py
+++ b/tests/providers/amazon/aws/utils/eks_test_utils.py
@@ -92,7 +92,7 @@ def generate_clusters(eks_hook: EksHook, num_clusters: int,
minimal: bool) -> li
"""
# Generates N clusters named cluster0, cluster1, .., clusterN
return [
- eks_hook.create_cluster(name=f"cluster{str(count)}",
**_input_builder(ClusterInputs, minimal))[
+ eks_hook.create_cluster(name=f"cluster{count}",
**_input_builder(ClusterInputs, minimal))[
ResponseAttributes.CLUSTER
][ClusterAttributes.NAME]
for count in range(num_clusters)
@@ -114,7 +114,7 @@ def generate_fargate_profiles(
# Generates N Fargate profiles named profile0, profile1, .., profileN
return [
eks_hook.create_fargate_profile(
- fargateProfileName=f"profile{str(count)}",
+ fargateProfileName=f"profile{count}",
clusterName=cluster_name,
**_input_builder(FargateProfileInputs, minimal),
)[ResponseAttributes.FARGATE_PROFILE][FargateProfileAttributes.FARGATE_PROFILE_NAME]
@@ -137,7 +137,7 @@ def generate_nodegroups(
# Generates N nodegroups named nodegroup0, nodegroup1, .., nodegroupN
return [
eks_hook.create_nodegroup(
- nodegroupName=f"nodegroup{str(count)}",
+ nodegroupName=f"nodegroup{count}",
clusterName=cluster_name,
**_input_builder(NodegroupInputs, minimal),
)[ResponseAttributes.NODEGROUP][NodegroupAttributes.NODEGROUP_NAME]
diff --git a/tests/providers/cncf/kubernetes/models/test_secret.py
b/tests/providers/cncf/kubernetes/models/test_secret.py
index d6b8a38c2d..556a5cc30a 100644
--- a/tests/providers/cncf/kubernetes/models/test_secret.py
+++ b/tests/providers/cncf/kubernetes/models/test_secret.py
@@ -113,7 +113,7 @@ class TestSecret:
"volumeMounts": [
{
"mountPath": "/etc/foo",
- "name": "secretvol" + str(static_uuid),
+ "name": f"secretvol{static_uuid}",
"readOnly": True,
},
],
@@ -123,7 +123,7 @@ class TestSecret:
"imagePullSecrets": [{"name": "pull_secret_a"}, {"name":
"pull_secret_b"}],
"securityContext": {"fsGroup": 2000, "runAsUser": 1000},
"volumes": [
- {"name": "secretvol" + str(static_uuid), "secret":
{"secretName": "secret_b"}},
+ {"name": f"secretvol{static_uuid}", "secret":
{"secretName": "secret_b"}},
],
},
}
diff --git a/tests/providers/microsoft/azure/hooks/test_asb.py
b/tests/providers/microsoft/azure/hooks/test_asb.py
index 61ce05a0b9..a9a3851561 100644
--- a/tests/providers/microsoft/azure/hooks/test_asb.py
+++ b/tests/providers/microsoft/azure/hooks/test_asb.py
@@ -30,7 +30,7 @@ from airflow.models import Connection
from airflow.providers.microsoft.azure.hooks.asb import AdminClientHook,
MessageHook
MESSAGE = "Test Message"
-MESSAGE_LIST = [MESSAGE + " " + str(n) for n in range(0, 10)]
+MESSAGE_LIST = [f"{MESSAGE} {n}" for n in range(0, 10)]
class TestAdminClientHook:
diff --git a/tests/system/providers/github/example_github.py
b/tests/system/providers/github/example_github.py
index a791a5bd71..658fcc470f 100644
--- a/tests/system/providers/github/example_github.py
+++ b/tests/system/providers/github/example_github.py
@@ -61,9 +61,9 @@ with DAG(
result = tag_name in all_tags
except GithubException as github_error: # type: ignore[misc]
- raise AirflowException(f"Failed to execute GithubSensor, error:
{str(github_error)}")
+ raise AirflowException(f"Failed to execute GithubSensor, error:
{github_error}")
except Exception as e:
- raise AirflowException(f"GitHub operator error: {str(e)}")
+ raise AirflowException(f"GitHub operator error: {e}")
return result
github_sensor = GithubSensor(
diff --git
a/tests/system/providers/microsoft/azure/example_azure_service_bus.py
b/tests/system/providers/microsoft/azure/example_azure_service_bus.py
index 7c4a99786d..1507e6281e 100644
--- a/tests/system/providers/microsoft/azure/example_azure_service_bus.py
+++ b/tests/system/providers/microsoft/azure/example_azure_service_bus.py
@@ -45,7 +45,7 @@ EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
CLIENT_ID = os.getenv("CLIENT_ID", "")
QUEUE_NAME = "sb_mgmt_queue_test"
MESSAGE = "Test Message"
-MESSAGE_LIST = [MESSAGE + " " + str(n) for n in range(0, 10)]
+MESSAGE_LIST = [f"{MESSAGE} {n}" for n in range(0, 10)]
TOPIC_NAME = "sb_mgmt_topic_test"
SUBSCRIPTION_NAME = "sb_mgmt_subscription"