This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 3f52790d42 Resolve G004: Logging statement uses f-string (#37873)
3f52790d42 is described below
commit 3f52790d425cd51386715c240d9a38a20756de2a
Author: Andrey Anshin <[email protected]>
AuthorDate: Wed Mar 6 07:41:47 2024 +0400
Resolve G004: Logging statement uses f-string (#37873)
* Resolve G004: Logging statement uses f-string
* Remove redundant whitespace in test case logging
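For context, G004 is the flake8-logging-format rule (enforced via ruff, per the pyproject.toml change below) that flags f-strings passed directly to logging calls. A minimal sketch of the pattern applied throughout the diff; the names here are illustrative placeholders, not taken from the commit:

    import logging

    log = logging.getLogger(__name__)
    port, hostname = 8080, "localhost"

    # Flagged by G004: the f-string is rendered eagerly, even when INFO
    # is disabled, and every message becomes a unique string.
    log.info(f"Starting server on port {port} and host {hostname}.")

    # Preferred: %-style placeholders defer formatting until a handler
    # actually emits the record, and the message template stays constant.
    log.info("Starting server on port %s and host %s.", port, hostname)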
---
airflow/cli/commands/internal_api_command.py | 2 +-
airflow/configuration.py | 6 ++++--
airflow/operators/python.py | 8 +++++---
airflow/providers/amazon/aws/executors/ecs/ecs_executor.py | 2 +-
airflow/providers/amazon/aws/hooks/batch_client.py | 6 +++---
airflow/providers/amazon/aws/hooks/glue.py | 5 +++--
airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py | 6 ++++--
airflow/providers/amazon/aws/triggers/eks.py | 2 +-
airflow/providers/apache/beam/triggers/beam.py | 2 +-
airflow/providers/apache/kafka/operators/produce.py | 7 +++++--
airflow/providers/fab/auth_manager/decorators/auth.py | 9 ++++++---
airflow/providers/ftp/operators/ftp.py | 4 +++-
airflow/providers/google/cloud/hooks/compute_ssh.py | 2 +-
.../providers/google/cloud/transfers/gcs_to_bigquery.py | 2 +-
airflow/providers/google/cloud/transfers/s3_to_gcs.py | 7 ++-----
airflow/providers/google/cloud/transfers/sql_to_gcs.py | 5 +++--
airflow/providers/google/cloud/triggers/cloud_batch.py | 2 +-
.../google/marketing_platform/operators/analytics_admin.py | 5 +++--
.../google/marketing_platform/sensors/display_video.py | 2 +-
airflow/providers/openlineage/extractors/base.py | 14 ++++++++------
airflow/providers/openlineage/plugins/listener.py | 6 ++++--
airflow/providers/sftp/operators/sftp.py | 7 +++++--
.../providers/snowflake/transfers/copy_into_snowflake.py | 4 ++--
airflow/providers_manager.py | 2 +-
airflow/triggers/temporal.py | 2 +-
airflow/www/auth.py | 9 ++++++---
airflow/www/views.py | 2 +-
pyproject.toml | 1 -
tests/system/providers/amazon/aws/example_sagemaker.py | 4 ++--
.../providers/apache/kafka/example_dag_hello_kafka.py | 4 ++--
tests/www/views/test_views_variable.py | 2 +-
31 files changed, 82 insertions(+), 59 deletions(-)
diff --git a/airflow/cli/commands/internal_api_command.py b/airflow/cli/commands/internal_api_command.py
index dd93801537..7094cca11b 100644
--- a/airflow/cli/commands/internal_api_command.py
+++ b/airflow/cli/commands/internal_api_command.py
@@ -70,7 +70,7 @@ def internal_api(args):
worker_timeout = args.worker_timeout
if args.debug:
- log.info(f"Starting the Internal API server on port {args.port} and
host {args.hostname}.")
+ log.info("Starting the Internal API server on port %s and host %s.",
args.port, args.hostname)
app = create_app(testing=conf.getboolean("core", "unit_test_mode"))
app.run(
debug=True, # nosec
diff --git a/airflow/configuration.py b/airflow/configuration.py
index a318fd5ac4..18c1e11b32 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -259,8 +259,10 @@ class AirflowConfigParser(ConfigParser):
if not self.is_template(section, key) and "{" in value:
errors = True
log.error(
- f"The {section}.{key} value {value} read from string
contains "
- "variable. This is not supported"
+ "The %s.%s value %s read from string contains
variable. This is not supported",
+ section,
+ key,
+ value,
)
self._default_values.set(section, key, value)
if errors:
diff --git a/airflow/operators/python.py b/airflow/operators/python.py
index da593aea39..ee4069b31b 100644
--- a/airflow/operators/python.py
+++ b/airflow/operators/python.py
@@ -936,9 +936,11 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
if self.expect_airflow:
self.log.warning("When checking for Airflow installed in
virtual environment got %s", e)
self.log.warning(
- f"This means that Airflow is not properly installed by "
- f"{self.python}. Airflow context keys will not be
available. "
- f"Please Install Airflow {airflow_version} in your
environment to access them."
+ "This means that Airflow is not properly installed by %s. "
+ "Airflow context keys will not be available. "
+ "Please Install Airflow %s in your environment to access
them.",
+ self.python,
+ airflow_version,
)
return None
diff --git a/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py b/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
index 570f657480..5aa6f2bb85 100644
--- a/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
+++ b/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
@@ -208,7 +208,7 @@ class AwsEcsExecutor(BaseExecutor):
if error_code in INVALID_CREDENTIALS_EXCEPTIONS:
self.IS_BOTO_CONNECTION_HEALTHY = False
self.log.warning(
- f"AWS credentials are either missing or expired:
{error}.\nRetrying connection"
+ "AWS credentials are either missing or expired:
%s.\nRetrying connection", error
)
except Exception:
diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py
index af9d79c1ae..465ca3e65b 100644
--- a/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -438,7 +438,7 @@ class BatchClientHook(AwsBaseHook):
return None
if len(all_info) > 1:
self.log.warning(
- f"AWS Batch job ({job_id}) has more than one log stream, only
returning the first one."
+ "AWS Batch job (%s) has more than one log stream, only
returning the first one.", job_id
)
return all_info[0]
@@ -474,7 +474,7 @@ class BatchClientHook(AwsBaseHook):
# If the user selected another logDriver than "awslogs", then CloudWatch logging is disabled.
if any(c.get("logDriver", "awslogs") != "awslogs" for c in log_configs):
self.log.warning(
- f"AWS Batch job ({job_id}) uses non-aws log drivers. AWS
CloudWatch logging disabled."
+ "AWS Batch job (%s) uses non-aws log drivers. AWS CloudWatch
logging disabled.", job_id
)
return []
@@ -482,7 +482,7 @@ class BatchClientHook(AwsBaseHook):
# If this method is called very early after starting the AWS Batch job,
# there is a possibility that the AWS CloudWatch Stream Name would not exist yet.
# This can also happen in case of misconfiguration.
- self.log.warning(f"AWS Batch job ({job_id}) doesn't have any AWS
CloudWatch Stream.")
+ self.log.warning("AWS Batch job (%s) doesn't have any AWS
CloudWatch Stream.", job_id)
return []
# Try to get user-defined log configuration options
diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/airflow/providers/amazon/aws/hooks/glue.py
index baf6780e07..3156bdfb3a 100644
--- a/airflow/providers/amazon/aws/hooks/glue.py
+++ b/airflow/providers/amazon/aws/hooks/glue.py
@@ -245,8 +245,9 @@ class GlueJobHook(AwsBaseHook):
if e.response["Error"]["Code"] == "ResourceNotFoundException":
# we land here when the log groups/streams don't exist yet
self.log.warning(
- "No new Glue driver logs so far.\nIf this persists,
check the CloudWatch dashboard "
- f"at:
https://{self.conn_region_name}.console.aws.amazon.com/cloudwatch/home"
+ "No new Glue driver logs so far.\n"
+ "If this persists, check the CloudWatch dashboard at:
%r.",
+
f"https://{self.conn_region_name}.console.aws.amazon.com/cloudwatch/home",
)
else:
raise
diff --git a/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py b/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
index 96a129b2dc..d62931a765 100644
--- a/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
@@ -120,8 +120,10 @@ class AzureBlobStorageToS3Operator(BaseOperator):
)
self.log.info(
- f"Getting list of the files in Container: {self.container_name}; "
- f"Prefix: {self.prefix}; Delimiter: {self.delimiter};"
+ "Getting list of the files in Container: %r; Prefix: %r;
Delimiter: %r.",
+ self.container_name,
+ self.prefix,
+ self.delimiter,
)
files = wasb_hook.get_blobs_list_recursive(
diff --git a/airflow/providers/amazon/aws/triggers/eks.py b/airflow/providers/amazon/aws/triggers/eks.py
index cbfb08b42f..187ef8edbb 100644
--- a/airflow/providers/amazon/aws/triggers/eks.py
+++ b/airflow/providers/amazon/aws/triggers/eks.py
@@ -214,7 +214,7 @@ class EksDeleteClusterTrigger(AwsBaseWaiterTrigger):
)
self.log.info("All Fargate profiles deleted")
else:
- self.log.info(f"No Fargate profiles associated with cluster
{self.cluster_name}")
+ self.log.info("No Fargate profiles associated with cluster %s",
self.cluster_name)
class EksCreateFargateProfileTrigger(AwsBaseWaiterTrigger):
diff --git a/airflow/providers/apache/beam/triggers/beam.py b/airflow/providers/apache/beam/triggers/beam.py
index 34a9c744e2..5b1f7a99d5 100644
--- a/airflow/providers/apache/beam/triggers/beam.py
+++ b/airflow/providers/apache/beam/triggers/beam.py
@@ -227,7 +227,7 @@ class BeamJavaPipelineTrigger(BeamPipelineBaseTrigger):
)
is_running = bool([job async for job in jobs if job.name == self.job_name])
except Exception as e:
- self.log.exception(f"Exception occurred while requesting
jobs with name {self.job_name}")
+ self.log.exception("Exception occurred while requesting
jobs with name %s", self.job_name)
yield TriggerEvent({"status": "error", "message": str(e)})
return
if is_running:
diff --git a/airflow/providers/apache/kafka/operators/produce.py b/airflow/providers/apache/kafka/operators/produce.py
index 29ed605961..913650ed95 100644
--- a/airflow/providers/apache/kafka/operators/produce.py
+++ b/airflow/providers/apache/kafka/operators/produce.py
@@ -30,10 +30,13 @@ local_logger = logging.getLogger("airflow")
def acked(err, msg):
if err is not None:
- local_logger.error(f"Failed to deliver message: {err}")
+ local_logger.error("Failed to deliver message: %s", err)
else:
local_logger.info(
- f"Produced record to topic {msg.topic()} partition
[{msg.partition()}] @ offset {msg.offset()}"
+ "Produced record to topic %s, partition [%s] @ offset %s",
+ msg.topic(),
+ msg.partition(),
+ msg.offset(),
)
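As an aside, acked above is a delivery callback; a rough sketch of how such a callback is typically wired into a confluent-kafka producer (broker address and topic are hypothetical, not part of this commit):

    import logging
    from confluent_kafka import Producer

    local_logger = logging.getLogger("airflow")

    def acked(err, msg):  # same shape as the callback patched above
        if err is not None:
            local_logger.error("Failed to deliver message: %s", err)
        else:
            local_logger.info(
                "Produced record to topic %s, partition [%s] @ offset %s",
                msg.topic(), msg.partition(), msg.offset(),
            )

    producer = Producer({"bootstrap.servers": "localhost:9092"})
    producer.produce("example-topic", value=b"payload", on_delivery=acked)
    producer.flush()  # block until outstanding delivery reports arrive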
diff --git a/airflow/providers/fab/auth_manager/decorators/auth.py b/airflow/providers/fab/auth_manager/decorators/auth.py
index 7089be08fc..cfd799cf0f 100644
--- a/airflow/providers/fab/auth_manager/decorators/auth.py
+++ b/airflow/providers/fab/auth_manager/decorators/auth.py
@@ -93,11 +93,14 @@ def _has_access_fab(permissions: Sequence[tuple[str, str]] | None = None) -> Cal
if len(unique_dag_ids) > 1:
log.warning(
- f"There are different dag_ids passed in the request:
{unique_dag_ids}. Returning 403."
+ "There are different dag_ids passed in the request: %s.
Returning 403.", unique_dag_ids
)
log.warning(
- f"kwargs: {dag_id_kwargs}, args: {dag_id_args}, "
- f"form: {dag_id_form}, json: {dag_id_json}"
+ "kwargs: %s, args: %s, form: %s, json: %s",
+ dag_id_kwargs,
+ dag_id_args,
+ dag_id_form,
+ dag_id_json,
)
return (
render_template(
diff --git a/airflow/providers/ftp/operators/ftp.py b/airflow/providers/ftp/operators/ftp.py
index 654c60bf7e..f38e663754 100644
--- a/airflow/providers/ftp/operators/ftp.py
+++ b/airflow/providers/ftp/operators/ftp.py
@@ -156,7 +156,9 @@ class FTPFileTransmitOperator(BaseOperator):
local_host = socket.gethostbyname(local_host)
except Exception as e:
self.log.warning(
- f"Failed to resolve local hostname. Using the hostname got by
socket.gethostbyname() without resolution. {e}",
+ "Failed to resolve local hostname. "
+ "Using the hostname got by socket.gethostbyname() without
resolution. %s",
+ e,
exc_info=True,
)
diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/airflow/providers/google/cloud/hooks/compute_ssh.py
index ee6eb63b02..2bc5dcf514 100644
--- a/airflow/providers/google/cloud/hooks/compute_ssh.py
+++ b/airflow/providers/google/cloud/hooks/compute_ssh.py
@@ -281,7 +281,7 @@ class ComputeEngineSSHHook(SSHHook):
if retry == self.max_retries:
raise AirflowException("Maximum retries exceeded. Aborting
operation.")
delay = random.randint(0, max_delay)
- self.log.info(f"Failed establish SSH connection, waiting
{delay} seconds to retry...")
+ self.log.info("Failed establish SSH connection, waiting %s
seconds to retry...", delay)
time.sleep(delay)
if not sshclient:
raise AirflowException("Unable to establish SSH connection.")
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
index 2ebe2bda05..be4b65791d 100644
--- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
+++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
@@ -467,7 +467,7 @@ class GCSToBigQueryOperator(BaseOperator):
impersonation_chain=self.impersonation_chain,
)
if self.max_id_key:
- self.log.info(f"Selecting the MAX value from BigQuery column
'{self.max_id_key}'...")
+ self.log.info("Selecting the MAX value from BigQuery column
%r...", self.max_id_key)
select_command = (
f"SELECT MAX({self.max_id_key}) AS max_value "
f"FROM {self.destination_project_dataset_table}"
diff --git a/airflow/providers/google/cloud/transfers/s3_to_gcs.py b/airflow/providers/google/cloud/transfers/s3_to_gcs.py
index 92dc97b4cc..b2b306f74b 100644
--- a/airflow/providers/google/cloud/transfers/s3_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/s3_to_gcs.py
@@ -319,14 +319,11 @@ class S3ToGCSOperator(S3ListOperator):
body[TRANSFER_SPEC][OBJECT_CONDITIONS][INCLUDE_PREFIXES] = files_chunk
job = transfer_hook.create_transfer_job(body=body)
- s = "s" if len(files_chunk) > 1 else ""
- self.log.info(f"Submitted job {job['name']} to transfer
{len(files_chunk)} file{s}")
+ self.log.info("Submitted job %s to transfer %s file(s).",
job["name"], len(files_chunk))
job_names.append(job["name"])
if len(files) > chunk_size:
- js = "s" if len(job_names) > 1 else ""
- fs = "s" if len(files) > 1 else ""
- self.log.info(f"Overall submitted {len(job_names)} job{js} to
transfer {len(files)} file{fs}")
+ self.log.info("Overall submitted %s job(s) to transfer %s
file(s).", len(job_names), len(files))
return job_names
diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
index efb5693dcf..11f9df4c9a 100644
--- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
@@ -153,9 +153,10 @@ class BaseSQLToGCSOperator(BaseOperator):
def execute(self, context: Context):
if self.partition_columns:
self.log.info(
- f"Found partition columns: {','.join(self.partition_columns)}.
"
+ "Found partition columns: %s. "
"Assuming the SQL statement is properly sorted by these
columns in "
- "ascending or descending order."
+ "ascending or descending order.",
+ ",".join(self.partition_columns),
)
self.log.info("Executing query")
diff --git a/airflow/providers/google/cloud/triggers/cloud_batch.py b/airflow/providers/google/cloud/triggers/cloud_batch.py
index 21b86688ab..78032ee067 100644
--- a/airflow/providers/google/cloud/triggers/cloud_batch.py
+++ b/airflow/providers/google/cloud/triggers/cloud_batch.py
@@ -140,7 +140,7 @@ class CloudBatchJobFinishedTrigger(BaseTrigger):
yield TriggerEvent({"status": "error", "message": str(e)})
return
- self.log.exception(f"Job with name [{self.job_name}] timed out")
+ self.log.exception("Job with name [%s] timed out", self.job_name)
yield TriggerEvent(
{
"job_name": self.job_name,
diff --git a/airflow/providers/google/marketing_platform/operators/analytics_admin.py b/airflow/providers/google/marketing_platform/operators/analytics_admin.py
index d961630f86..a0138099f1 100644
--- a/airflow/providers/google/marketing_platform/operators/analytics_admin.py
+++ b/airflow/providers/google/marketing_platform/operators/analytics_admin.py
@@ -105,8 +105,9 @@ class GoogleAnalyticsAdminListAccountsOperator(GoogleCloudBaseOperator):
impersonation_chain=self.impersonation_chain,
)
self.log.info(
- "Requesting list of Google Analytics accounts. "
- f"Page size: {self.page_size}, page token: {self.page_token}"
+ "Requesting list of Google Analytics accounts. Page size: %s, page
token: %s",
+ self.page_size,
+ self.page_token,
)
accounts = hook.list_accounts(
page_size=self.page_size,
diff --git a/airflow/providers/google/marketing_platform/sensors/display_video.py b/airflow/providers/google/marketing_platform/sensors/display_video.py
index 75186abd01..b002682a14 100644
--- a/airflow/providers/google/marketing_platform/sensors/display_video.py
+++ b/airflow/providers/google/marketing_platform/sensors/display_video.py
@@ -157,7 +157,7 @@ class GoogleDisplayVideo360RunQuerySensor(BaseSensorOperator):
response = hook.get_report(query_id=self.query_id, report_id=self.report_id)
status = response.get("metadata", {}).get("status", {}).get("state")
- self.log.info(f"STATUS OF THE REPORT {self.report_id} FOR QUERY
{self.query_id}: {status}")
+ self.log.info("STATUS OF THE REPORT %s FOR QUERY %s: %s",
self.report_id, self.query_id, status)
if response and status in ["DONE", "FAILED"]:
return True
return False
diff --git a/airflow/providers/openlineage/extractors/base.py b/airflow/providers/openlineage/extractors/base.py
index d87334f486..3f455af86a 100644
--- a/airflow/providers/openlineage/extractors/base.py
+++ b/airflow/providers/openlineage/extractors/base.py
@@ -87,8 +87,9 @@ class BaseExtractor(ABC, LoggingMixin):
def extract(self) -> OperatorLineage | None:
if self._is_operator_disabled:
self.log.debug(
- f"Skipping extraction for operator {self.operator.task_type} "
- "due to its presence in [openlineage]
openlineage_disabled_for_operators."
+ "Skipping extraction for operator %s "
+ "due to its presence in [openlineage]
openlineage_disabled_for_operators.",
+ self.operator.task_type,
)
return None
return self._execute_extraction()
@@ -121,16 +122,17 @@ class DefaultExtractor(BaseExtractor):
return None
except AttributeError:
self.log.debug(
- f"Operator {self.operator.task_type} does not have the "
- "get_openlineage_facets_on_start method."
+ "Operator %s does not have the get_openlineage_facets_on_start
method.",
+ self.operator.task_type,
)
return None
def extract_on_complete(self, task_instance) -> OperatorLineage | None:
if self._is_operator_disabled:
self.log.debug(
- f"Skipping extraction for operator {self.operator.task_type} "
- "due to its presence in [openlineage]
openlineage_disabled_for_operators."
+ "Skipping extraction for operator %s "
+ "due to its presence in [openlineage]
openlineage_disabled_for_operators.",
+ self.operator.task_type,
)
return None
if task_instance.state == TaskInstanceState.FAILED:
diff --git a/airflow/providers/openlineage/plugins/listener.py b/airflow/providers/openlineage/plugins/listener.py
index af5405f92a..8957cd8ad6 100644
--- a/airflow/providers/openlineage/plugins/listener.py
+++ b/airflow/providers/openlineage/plugins/listener.py
@@ -61,8 +61,10 @@ class OpenLineageListener:
):
if not hasattr(task_instance, "task"):
self.log.warning(
- f"No task set for TI object task_id: {task_instance.task_id} -
"
- f"dag_id: {task_instance.dag_id} - run_id
{task_instance.run_id}"
+ "No task set for TI object task_id: %s - dag_id: %s - run_id
%s",
+ task_instance.task_id,
+ task_instance.dag_id,
+ task_instance.run_id,
)
return
diff --git a/airflow/providers/sftp/operators/sftp.py b/airflow/providers/sftp/operators/sftp.py
index 64367716a0..d357cfbbc7 100644
--- a/airflow/providers/sftp/operators/sftp.py
+++ b/airflow/providers/sftp/operators/sftp.py
@@ -210,7 +210,9 @@ class SFTPOperator(BaseOperator):
local_host = socket.gethostbyname(local_host)
except Exception as e:
self.log.warning(
- f"Failed to resolve local hostname. Using the hostname got by
socket.gethostbyname() without resolution. {e}",
+ "Failed to resolve local hostname. "
+ "Using the hostname got by socket.gethostbyname() without
resolution. %s",
+ e,
exc_info=True,
)
@@ -225,7 +227,8 @@ class SFTPOperator(BaseOperator):
remote_host = socket.gethostbyname(remote_host)
except OSError as e:
self.log.warning(
- f"Failed to resolve remote hostname. Using the provided
hostname without resolution. {e}",
+ "Failed to resolve remote hostname. Using the provided
hostname without resolution. %s",
+ e,
exc_info=True,
)
diff --git a/airflow/providers/snowflake/transfers/copy_into_snowflake.py b/airflow/providers/snowflake/transfers/copy_into_snowflake.py
index 0048637bf0..50b1ed6af3 100644
--- a/airflow/providers/snowflake/transfers/copy_into_snowflake.py
+++ b/airflow/providers/snowflake/transfers/copy_into_snowflake.py
@@ -254,8 +254,8 @@ class CopyFromExternalStageToSnowflakeOperator(BaseOperator):
run_facets = {}
if extraction_error_files:
self.log.debug(
- f"Unable to extract Dataset namespace and name "
- f"for the following files: `{extraction_error_files}`."
+ "Unable to extract Dataset namespace and name for the
following files: `%s`.",
+ extraction_error_files,
)
run_facets["extractionError"] = ExtractionErrorRunFacet(
totalTasks=len(query_results),
diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py
index cce54d81d1..1491231c13 100644
--- a/airflow/providers_manager.py
+++ b/airflow/providers_manager.py
@@ -657,7 +657,7 @@ class ProvidersManager(LoggingMixin, metaclass=Singleton):
seen.add(path)
self._add_provider_info_from_local_source_files_on_path(path)
except Exception as e:
- log.warning(f"Error when loading 'provider.yaml' files from
{path} airflow sources: {e}")
+ log.warning("Error when loading 'provider.yaml' files from %s
airflow sources: %s", path, e)
def _add_provider_info_from_local_source_files_on_path(self, path) -> None:
"""
diff --git a/airflow/triggers/temporal.py b/airflow/triggers/temporal.py
index 03f0901db7..79e8f39dd7 100644
--- a/airflow/triggers/temporal.py
+++ b/airflow/triggers/temporal.py
@@ -63,7 +63,7 @@ class DateTimeTrigger(BaseTrigger):
for step in 3600, 60, 10:
seconds_remaining = (self.moment - pendulum.instance(timezone.utcnow())).total_seconds()
while seconds_remaining > 2 * step:
- self.log.info(f"{int(seconds_remaining)} seconds remaining;
sleeping {step} seconds")
+ self.log.info("%d seconds remaining; sleeping %s seconds",
seconds_remaining, step)
await asyncio.sleep(step)
seconds_remaining = (self.moment - pendulum.instance(timezone.utcnow())).total_seconds()
# Sleep a second at a time otherwise
diff --git a/airflow/www/auth.py b/airflow/www/auth.py
index 39c8444f99..1b3753f87d 100644
--- a/airflow/www/auth.py
+++ b/airflow/www/auth.py
@@ -222,11 +222,14 @@ def has_access_dag(method: ResourceMethod, access_entity: DagAccessEntity | None
if len(unique_dag_ids) > 1:
log.warning(
- f"There are different dag_ids passed in the request:
{unique_dag_ids}. Returning 403."
+ "There are different dag_ids passed in the request: %s.
Returning 403.", unique_dag_ids
)
log.warning(
- f"kwargs: {dag_id_kwargs}, args: {dag_id_args}, "
- f"form: {dag_id_form}, json: {dag_id_json}"
+ "kwargs: %s, args: %s, form: %s, json: %s",
+ dag_id_kwargs,
+ dag_id_args,
+ dag_id_form,
+ dag_id_json,
)
return (
render_template(
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 7f52708d9b..f94e9624d3 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -4896,7 +4896,7 @@ class VariableModelView(AirflowModelView):
if action_on_existing == "fail" and existing_keys:
failed_repr = ", ".join(repr(k) for k in sorted(existing_keys))
flash(f"Failed. The variables with these keys: {failed_repr}
already exists.")
- logger.error(f"Failed. The variables with these keys:
{failed_repr} already exists.")
+ logger.error("Failed. The variables with these keys: %s
already exists.", failed_repr)
self.update_redirect()
return redirect(self.get_redirect())
skipped = set()
diff --git a/pyproject.toml b/pyproject.toml
index 0fa7876811..c52e63c2c0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1339,7 +1339,6 @@ extend-select = [
"B006", # Checks for uses of mutable objects as function argument defaults.
]
ignore = [
- "G004", # Logging statement uses f-string (not fixed yet)
"D203",
"D212",
"D213",
diff --git a/tests/system/providers/amazon/aws/example_sagemaker.py b/tests/system/providers/amazon/aws/example_sagemaker.py
index 84fa9582f2..8c41b9b543 100644
--- a/tests/system/providers/amazon/aws/example_sagemaker.py
+++ b/tests/system/providers/amazon/aws/example_sagemaker.py
@@ -458,8 +458,8 @@ def delete_docker_image(image_name):
if docker_build.returncode != 0:
logger.error(
"Failed to delete local docker image. "
- "Run 'docker images' to see if you need to clean it yourself.\n"
- f"error message: {stderr}"
+ "Run 'docker images' to see if you need to clean it
yourself.\nerror message: %s",
+ stderr,
)
diff --git a/tests/system/providers/apache/kafka/example_dag_hello_kafka.py b/tests/system/providers/apache/kafka/example_dag_hello_kafka.py
index 57e67ac81a..d12e46f575 100644
--- a/tests/system/providers/apache/kafka/example_dag_hello_kafka.py
+++ b/tests/system/providers/apache/kafka/example_dag_hello_kafka.py
@@ -125,7 +125,7 @@ consumer_logger = logging.getLogger("airflow")
def consumer_function(message, prefix=None):
key = json.loads(message.key())
value = json.loads(message.value())
- consumer_logger.info(f"{prefix} {message.topic()} @ {message.offset()};
{key} : {value}")
+ consumer_logger.info("%s %s @ %s; %s : %s", prefix, message.topic(),
message.offset(), key, value)
return
@@ -133,7 +133,7 @@ def consumer_function_batch(messages, prefix=None):
for message in messages:
key = json.loads(message.key())
value = json.loads(message.value())
- consumer_logger.info(f"{prefix} {message.topic()} @
{message.offset()}; {key} : {value}")
+ consumer_logger.info("%s %s @ %s; %s : %s", prefix, message.topic(),
message.offset(), key, value)
return
diff --git a/tests/www/views/test_views_variable.py b/tests/www/views/test_views_variable.py
index 4fce79eb45..e6544b608d 100644
--- a/tests/www/views/test_views_variable.py
+++ b/tests/www/views/test_views_variable.py
@@ -177,7 +177,7 @@ def test_import_variables_fails_if_action_if_exists_is_fail(session, admin_clien
data={"file": (bytes_content, "test.json"), "action_if_exists":
"fail"},
follow_redirects=True,
)
- assert "Failed. The variables with these keys: 'str_key' already exists."
in caplog.text
+ assert "Failed. The variables with these keys: 'str_key' already exists."
in caplog.text
def test_import_variables_anon(session, app):