This is an automated email from the ASF dual-hosted git repository.
joshfell pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 1cc9fe1df1 Add flake8-implicit-str-concat check to Ruff (#36597)
1cc9fe1df1 is described below
commit 1cc9fe1df111950327e9922b00222846196b029d
Author: Josh Fell <[email protected]>
AuthorDate: Fri Jan 5 09:22:32 2024 -0500
Add flake8-implicit-str-concat check to Ruff (#36597)
This was enabled initially in #23873, but hasn't been part of the Ruff
checks to date. Let's add it!
---
airflow/providers/weaviate/hooks/weaviate.py | 4 +---
dev/breeze/src/airflow_breeze/commands/ci_image_commands.py | 2 +-
.../src/airflow_breeze/commands/production_image_commands.py | 2 +-
.../src/airflow_breeze/commands/release_management_commands.py | 2 +-
.../airflow_breeze/prepare_providers/provider_documentation.py | 6 +++---
.../src/airflow_breeze/prepare_providers/provider_packages.py | 6 ++----
dev/breeze/src/airflow_breeze/utils/packages.py | 4 ++--
pyproject.toml | 1 +
scripts/ci/pre_commit/pre_commit_check_provider_docs.py | 2 +-
scripts/in_container/run_generate_constraints.py | 2 +-
tests/api_connexion/endpoints/test_xcom_endpoint.py | 2 +-
tests/providers/amazon/aws/links/test_batch.py | 2 +-
tests/providers/amazon/aws/links/test_emr.py | 4 ++--
tests/providers/weaviate/hooks/test_weaviate.py | 8 +++-----
14 files changed, 21 insertions(+), 26 deletions(-)
diff --git a/airflow/providers/weaviate/hooks/weaviate.py
b/airflow/providers/weaviate/hooks/weaviate.py
index e67061f757..470d9bf54f 100644
--- a/airflow/providers/weaviate/hooks/weaviate.py
+++ b/airflow/providers/weaviate/hooks/weaviate.py
@@ -280,9 +280,7 @@ class WeaviateHook(BaseHook):
intersection_classes =
set__exiting_classes.intersection(set__to_be_added_classes)
classes_to_create = set()
if existing == "fail" and intersection_classes:
- raise ValueError(
- f"Trying to create class {intersection_classes}" f" but this
class already exists."
- )
+ raise ValueError(f"Trying to create class {intersection_classes}
but this class already exists.")
elif existing == "ignore":
classes_to_create = set__to_be_added_classes - set__exiting_classes
elif existing == "replace":
diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
index 981124540f..0ee6b7d405 100644
--- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
@@ -619,7 +619,7 @@ def verify(
)
if (pull or image_name) and run_in_parallel:
get_console().print(
- "[error]You cannot use --pull,--image-name and --run-in-parallel
at the same time. " "Exiting[/]"
+ "[error]You cannot use --pull,--image-name and --run-in-parallel
at the same time. Exiting[/]"
)
sys.exit(1)
if run_in_parallel:
diff --git
a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
index 9c2cb3a57c..f1d8a942bb 100644
--- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
@@ -553,7 +553,7 @@ def verify(
)
if (pull or image_name) and run_in_parallel:
get_console().print(
- "[error]You cannot use --pull,--image-name and --run-in-parallel
at the same time. " "Exiting[/]"
+ "[error]You cannot use --pull,--image-name and --run-in-parallel
at the same time. Exiting[/]"
)
sys.exit(1)
if run_in_parallel:
diff --git
a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index d7ae88fd13..17ab72f68e 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -488,7 +488,7 @@ def basic_provider_checks(provider_package_id: str) ->
dict[str, Any]:
f"since you asked for it, it will be built [/]\n"
)
elif provider_metadata.get("state") == "suspended":
- get_console().print(f"[warning]The package: {provider_package_id} is
suspended " f"skipping it [/]\n")
+ get_console().print(f"[warning]The package: {provider_package_id} is
suspended skipping it [/]\n")
raise PackageSuspendedException()
return provider_metadata
diff --git
a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
index 391edb6056..6f3a4945a0 100644
--- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
+++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
@@ -348,7 +348,7 @@ def _get_all_changes_for_package(
)
else:
get_console().print(
- f"[info]New version of the '{provider_package_id}' " f"package is
ready to be released!\n"
+ f"[info]New version of the '{provider_package_id}' package is
ready to be released!\n"
)
next_version_tag = f"{HTTPS_REMOTE}/{base_branch}"
changes_table = ""
@@ -413,7 +413,7 @@ def _ask_the_user_for_the_type_of_changes(non_interactive:
bool) -> TypeOfChange
if given_answer in type_of_changes_array:
return TypeOfChange(given_answer)
get_console().print(
- f"[warning] Wrong answer given: '{given_answer}'. " f"Should be
one of {display_answers}"
+ f"[warning] Wrong answer given: '{given_answer}'. Should be one of
{display_answers}"
)
@@ -680,7 +680,7 @@ def update_release_notes(
answer = user_confirm(f"Provider {provider_package_id} marked
for release. Proceed?")
if answer == Answer.NO:
get_console().print(
- f"\n[warning]Skipping provider: {provider_package_id} "
f"on user request![/]\n"
+ f"\n[warning]Skipping provider: {provider_package_id} on
user request![/]\n"
)
raise PrepareReleaseDocsUserSkippedException()
elif answer == Answer.QUIT:
diff --git
a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py
b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py
index a158334f7d..afbef0603d 100644
--- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py
+++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py
@@ -86,7 +86,7 @@ def copy_provider_sources_to_target(provider_id: str) -> Path:
relative_provider_path =
source_provider_sources_path.relative_to(AIRFLOW_SOURCES_ROOT)
target_providers_sub_folder = target_provider_root_path /
relative_provider_path
get_console().print(
- f"[info]Copying provider sources: " f"{source_provider_sources_path}
-> {target_providers_sub_folder}"
+ f"[info]Copying provider sources: {source_provider_sources_path} ->
{target_providers_sub_folder}"
)
copytree(source_provider_sources_path, target_providers_sub_folder)
shutil.copy(AIRFLOW_SOURCES_ROOT / "LICENSE", target_providers_sub_folder
/ "LICENSE")
@@ -214,9 +214,7 @@ def build_provider_package(provider_id: str,
target_provider_root_sources_path:
except subprocess.CalledProcessError as ex:
get_console().print("[error]The command returned an error %s", ex)
raise PrepareReleasePackageErrorBuildingPackageException()
- get_console().print(
- f"\n[info]Prepared provider package {provider_id} in " f"format
{package_format}[/]\n"
- )
+ get_console().print(f"\n[info]Prepared provider package {provider_id} in
format {package_format}[/]\n")
def move_built_packages_and_cleanup(
diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py
b/dev/breeze/src/airflow_breeze/utils/packages.py
index 49626a4668..0dd1cd5a7d 100644
--- a/dev/breeze/src/airflow_breeze/utils/packages.py
+++ b/dev/breeze/src/airflow_breeze/utils/packages.py
@@ -320,7 +320,7 @@ def get_short_package_name(long_form_provider: str) -> str:
else:
if not long_form_provider.startswith(LONG_PROVIDERS_PREFIX):
raise ValueError(
- f"Invalid provider name: {long_form_provider}. " f"Should
start with {LONG_PROVIDERS_PREFIX}"
+ f"Invalid provider name: {long_form_provider}. Should start
with {LONG_PROVIDERS_PREFIX}"
)
return long_form_provider[len(LONG_PROVIDERS_PREFIX) :].replace("-",
".")
@@ -661,7 +661,7 @@ def
make_sure_remote_apache_exists_and_fetch(github_repository: str = "apache/ai
)
else:
get_console().print(
- f"[error]Error {ex}[/]\n" f"[error]When checking if
{HTTPS_REMOTE} is set.[/]\n\n"
+ f"[error]Error {ex}[/]\n[error]When checking if {HTTPS_REMOTE}
is set.[/]\n\n"
)
sys.exit(1)
get_console().print("[info]Fetching full history and tags from remote.")
diff --git a/pyproject.toml b/pyproject.toml
index 7a305243a1..e7db87a433 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -62,6 +62,7 @@ extend-select = [
"TCH", # Rules around TYPE_CHECKING blocks
"TID251", # Specific modules or module members that may not be imported
or accessed
"TID253", # Ban certain modules from being imported at module level
+ "ISC", # Checks for implicit literal string concatenation (auto-fixable)
]
extend-ignore = [
"D203",
diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_docs.py
b/scripts/ci/pre_commit/pre_commit_check_provider_docs.py
index 0681b02287..51b3b2ae00 100755
--- a/scripts/ci/pre_commit/pre_commit_check_provider_docs.py
+++ b/scripts/ci/pre_commit/pre_commit_check_provider_docs.py
@@ -172,7 +172,7 @@ def check_documentation_link_exists(link: str,
doc_file_name: str):
fail_pre_commit = True
console.print()
console.print(
- f"[red]ERROR! The {docs_file} does not contain:\n:[/]" f"{link}\n"
f"[bright_blue]Please add it!"
+ f"[red]ERROR! The {docs_file} does not
contain:\n:[/]{link}\n[bright_blue]Please add it!"
)
console.print()
diff --git a/scripts/in_container/run_generate_constraints.py
b/scripts/in_container/run_generate_constraints.py
index e03eff5126..a73d2f649a 100755
--- a/scripts/in_container/run_generate_constraints.py
+++ b/scripts/in_container/run_generate_constraints.py
@@ -358,7 +358,7 @@ def generate_constraints_no_providers(config_params:
ConfigParams) -> None:
core_dependencies = get_core_airflow_dependencies()
uninstall_all_packages(config_params)
console.print(
- f"[bright_blue]Installing airflow with [{core_dependencies}] extras
only " f"with eager upgrade."
+ f"[bright_blue]Installing airflow with [{core_dependencies}] extras
only with eager upgrade."
)
install_local_airflow_with_eager_upgrade(
config_params, config_params.eager_upgrade_additional_requirements,
core_dependencies
diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py
b/tests/api_connexion/endpoints/test_xcom_endpoint.py
index 2efb08f705..1e4dbb5678 100644
--- a/tests/api_connexion/endpoints/test_xcom_endpoint.py
+++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py
@@ -478,7 +478,7 @@ class TestGetXComEntries(TestXComEndpoint):
def assert_expected_result(expected_entries, key=None):
response = self.client.get(
- "/api/v1/dags/~/dagRuns/~/taskInstances/~/xcomEntries"
f"{('?xcom_key='+key )}",
+
f"/api/v1/dags/~/dagRuns/~/taskInstances/~/xcomEntries?xcom_key={key}",
environ_overrides={"REMOTE_USER": "test"},
)
diff --git a/tests/providers/amazon/aws/links/test_batch.py
b/tests/providers/amazon/aws/links/test_batch.py
index 23a11193ba..2c8c9b59cb 100644
--- a/tests/providers/amazon/aws/links/test_batch.py
+++ b/tests/providers/amazon/aws/links/test_batch.py
@@ -57,7 +57,7 @@ class TestBatchJobQueueLink(BaseAwsLinksTestCase):
def test_extra_link(self):
self.assert_extra_link_url(
expected_url=(
- "https://console.aws.amazon.com/batch/home"
"?region=us-east-1#queues/detail/arn:fake:jq"
+
"https://console.aws.amazon.com/batch/home?region=us-east-1#queues/detail/arn:fake:jq"
),
region_name="us-east-1",
aws_partition="aws",
diff --git a/tests/providers/amazon/aws/links/test_emr.py
b/tests/providers/amazon/aws/links/test_emr.py
index c7f12983e8..590e7f1c61 100644
--- a/tests/providers/amazon/aws/links/test_emr.py
+++ b/tests/providers/amazon/aws/links/test_emr.py
@@ -30,7 +30,7 @@ class TestEmrClusterLink(BaseAwsLinksTestCase):
def test_extra_link(self):
self.assert_extra_link_url(
expected_url=(
- "https://console.aws.amazon.com/emr/home"
"?region=us-west-1#/clusterDetails/j-TEST-FLOW-ID"
+
"https://console.aws.amazon.com/emr/home?region=us-west-1#/clusterDetails/j-TEST-FLOW-ID"
),
region_name="us-west-1",
aws_partition="aws",
@@ -57,7 +57,7 @@ class TestEmrLogsLink(BaseAwsLinksTestCase):
def test_extra_link(self):
self.assert_extra_link_url(
expected_url=(
- "https://console.aws.amazon.com/s3/buckets/myLogUri/"
"?region=eu-west-2&prefix=j-8989898989/"
+
"https://console.aws.amazon.com/s3/buckets/myLogUri/?region=eu-west-2&prefix=j-8989898989/"
),
region_name="eu-west-2",
aws_partition="aws",
diff --git a/tests/providers/weaviate/hooks/test_weaviate.py
b/tests/providers/weaviate/hooks/test_weaviate.py
index 9d931b9397..075b3816fa 100644
--- a/tests/providers/weaviate/hooks/test_weaviate.py
+++ b/tests/providers/weaviate/hooks/test_weaviate.py
@@ -684,13 +684,13 @@ def test___generate_uuids(generate_uuid5, weaviate_hook):
{"id": [1, 2], "name": ["ross", "bob"], "age": ["12", "22"], "gender":
["m", "m"]}
)
with pytest.raises(
- ValueError, match=r"Property 'id' already in dataset. Consider
renaming or specify" r" 'uuid_column'"
+ ValueError, match=r"Property 'id' already in dataset. Consider
renaming or specify 'uuid_column'"
):
weaviate_hook._generate_uuids(df=df, class_name="test",
unique_columns=["name", "age", "gender"])
with pytest.raises(
ValueError,
- match=r"Property age already in dataset. Consider renaming or specify"
r" a different 'uuid_column'.",
+ match=r"Property age already in dataset. Consider renaming or specify
a different 'uuid_column'.",
):
weaviate_hook._generate_uuids(
df=df, uuid_column="age", class_name="test",
unique_columns=["name", "age", "gender"]
@@ -773,9 +773,7 @@ def test_error_option_of_create_or_replace_document_objects(
_get_segregated_documents.return_value = ({}, {"abc.xml"}, {},
{"zyx.html"})
_generate_uuids.return_value = (df, "id")
- with pytest.raises(
- ValueError, match="Documents abc.xml already exists. You can either" "
skip or replace"
- ):
+ with pytest.raises(ValueError, match="Documents abc.xml already exists.
You can either skip or replace"):
weaviate_hook.create_or_replace_document_objects(
data=df, document_column="doc", class_name="test", existing="error"
)