This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 6b21e4b88c Bring back detection of implicit single-line string
concatenation (#31270)
6b21e4b88c is described below
commit 6b21e4b88c3d18eb1ba176e6ac53da90a4523880
Author: Jarek Potiuk <[email protected]>
AuthorDate: Sat May 13 17:23:57 2023 +0200
Bring back detection of implicit single-line string concatenation (#31270)
When we switched to ruff, we lost one of the rules that was
useful and is not enabled by default in ruff - detection of implicit
single-line string concatenation.
It is often automatically introduced by black when reformatting two
long strings, and it is easy to miss; it also hurts readability,
so we should bring it back.
---
airflow/providers/amazon/aws/hooks/batch_client.py | 2 +-
airflow/providers/amazon/aws/operators/batch.py | 2 +-
airflow/providers/openlineage/plugins/adapter.py | 2 +-
airflow/providers/openlineage/utils/utils.py | 2 +-
.../src/airflow_breeze/commands/release_management_commands.py | 2 +-
pyproject.toml | 2 ++
tests/providers/google/cloud/triggers/test_gcs.py | 6 +++---
tests/providers/microsoft/azure/log/test_wasb_task_handler.py | 2 +-
8 files changed, 11 insertions(+), 9 deletions(-)
diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py
b/airflow/providers/amazon/aws/hooks/batch_client.py
index 1f9917e86b..624869a06b 100644
--- a/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -419,7 +419,7 @@ class BatchClientHook(AwsBaseHook):
return None
if len(all_info) > 1:
self.log.warning(
- f"AWS Batch job ({job_id}) has more than one log stream, "
f"only returning the first one."
+ f"AWS Batch job ({job_id}) has more than one log stream, only
returning the first one."
)
return all_info[0]
diff --git a/airflow/providers/amazon/aws/operators/batch.py
b/airflow/providers/amazon/aws/operators/batch.py
index 6acd2fd3a9..272122d109 100644
--- a/airflow/providers/amazon/aws/operators/batch.py
+++ b/airflow/providers/amazon/aws/operators/batch.py
@@ -314,7 +314,7 @@ class BatchOperator(BaseOperator):
if len(awslogs) > 1:
# there can be several log streams on multi-node jobs
self.log.warning(
- "out of all those logs, we can only link to one in the UI.
" "Using the first one."
+ "out of all those logs, we can only link to one in the UI.
Using the first one."
)
CloudWatchEventsLink.persist(
diff --git a/airflow/providers/openlineage/plugins/adapter.py
b/airflow/providers/openlineage/plugins/adapter.py
index b74ac0c481..1cb7ccf84b 100644
--- a/airflow/providers/openlineage/plugins/adapter.py
+++ b/airflow/providers/openlineage/plugins/adapter.py
@@ -52,7 +52,7 @@ _DAG_NAMESPACE = conf.get(
"openlineage", "namespace", fallback=os.getenv("OPENLINEAGE_NAMESPACE",
_DAG_DEFAULT_NAMESPACE)
)
-_PRODUCER = f"https://github.com/apache/airflow/tree/providers-openlineage/"
f"{OPENLINEAGE_PROVIDER_VERSION}"
+_PRODUCER =
f"https://github.com/apache/airflow/tree/providers-openlineage/{OPENLINEAGE_PROVIDER_VERSION}"
set_producer(_PRODUCER)
diff --git a/airflow/providers/openlineage/utils/utils.py
b/airflow/providers/openlineage/utils/utils.py
index 2a180a5973..84ad41e237 100644
--- a/airflow/providers/openlineage/utils/utils.py
+++ b/airflow/providers/openlineage/utils/utils.py
@@ -367,7 +367,7 @@ class OpenLineageRedactor(SecretsMasker):
return super()._redact(item, name, depth, max_depth)
except Exception as e:
log.warning(
- "Unable to redact %s" "Error was: %s: %s",
+ "Unable to redact %s. Error was: %s: %s",
repr(item),
type(e).__name__,
str(e),
diff --git
a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index 8af5360284..c9bf21589e 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -608,7 +608,7 @@ def install_provider_packages(
package_format=package_format,
install_selected_providers=install_selected_providers
)
get_console().print(
- f"[info]Splitting {len(list_of_all_providers)} " f"providers into
max {parallelism} chunks"
+ f"[info]Splitting {len(list_of_all_providers)} providers into max
{parallelism} chunks"
)
provider_chunks = [sorted(list_of_all_providers[i::parallelism]) for i
in range(parallelism)]
# filter out empty ones
diff --git a/pyproject.toml b/pyproject.toml
index 0568ed62f9..b479dc7ac0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,6 +46,8 @@ extend-select = [
"UP", # Pyupgrade
"RUF100", # Unused noqa (auto-fixable)
+ # implicit single-line string concatenation
+ "ISC001",
# We ignore more pydocstyle than we enable, so be more selective at what
we enable
"D101",
"D106",
diff --git a/tests/providers/google/cloud/triggers/test_gcs.py
b/tests/providers/google/cloud/triggers/test_gcs.py
index 9a83cf473a..f40173b574 100644
--- a/tests/providers/google/cloud/triggers/test_gcs.py
+++ b/tests/providers/google/cloud/triggers/test_gcs.py
@@ -160,7 +160,7 @@ class TestGCSPrefixBlobTrigger:
@pytest.mark.asyncio
@async_mock.patch(
- "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger"
"._list_blobs_with_prefix"
+
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
)
async def test_gcs_prefix_blob_trigger_success(self,
mock_list_blobs_with_prefixs):
"""
@@ -177,7 +177,7 @@ class TestGCSPrefixBlobTrigger:
@pytest.mark.asyncio
@async_mock.patch(
- "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger"
"._list_blobs_with_prefix"
+
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
)
async def test_gcs_prefix_blob_trigger_exception(self,
mock_list_blobs_with_prefixs):
"""
@@ -191,7 +191,7 @@ class TestGCSPrefixBlobTrigger:
@pytest.mark.asyncio
@async_mock.patch(
- "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger"
"._list_blobs_with_prefix"
+
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
)
async def test_gcs_prefix_blob_trigger_pending(self,
mock_list_blobs_with_prefixs):
"""
diff --git a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
index 73001ae21b..4dfe1e1e7c 100644
--- a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
+++ b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
@@ -113,7 +113,7 @@ class TestWasbTaskHandler:
[
(
"localhost",
- "*** Found remote logs:\n" "*** *
wasb://wasb-container/abc/hello.log\n" "Log line",
+ "*** Found remote logs:\n*** *
wasb://wasb-container/abc/hello.log\nLog line",
)
]
],