This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new e7a3f7f0bd Enable PT022: pytest-useless-yield-fixture (#37788)
e7a3f7f0bd is described below

commit e7a3f7f0bdf7866ca29d1eea2e6c23bcc1e7c36c
Author: Andrey Anshin <[email protected]>
AuthorDate: Fri Mar 1 02:04:44 2024 +0400

    Enable PT022: pytest-useless-yield-fixture (#37788)
---
 pyproject.toml                                            | 6 ++++++
 tests/cli/commands/test_info_command.py                   | 2 +-
 tests/conftest.py                                         | 1 -
 tests/core/test_impersonation_tests.py                    | 2 --
 tests/jobs/test_local_task_job.py                         | 1 -
 tests/models/test_dagbag.py                               | 9 ++++-----
 tests/models/test_timestamp.py                            | 1 -
 tests/models/test_xcom_arg.py                             | 1 -
 tests/operators/test_bash.py                              | 2 +-
 tests/providers/amazon/aws/hooks/test_batch_waiters.py    | 1 -
 tests/providers/amazon/aws/hooks/test_ecr.py              | 1 -
 tests/providers/amazon/aws/operators/test_appflow.py      | 2 +-
 tests/providers/amazon/aws/triggers/test_glue_databrew.py | 2 +-
 tests/providers/amazon/conftest.py                        | 1 -
 tests/providers/cncf/kubernetes/operators/test_job.py     | 2 +-
 tests/providers/cncf/kubernetes/operators/test_pod.py     | 3 +--
 tests/providers/google/cloud/hooks/test_gcs.py            | 2 +-
 tests/providers/google/cloud/log/test_gcs_task_handler.py | 2 +-
 tests/providers/google/cloud/sensors/test_bigquery.py     | 2 +-
 tests/providers/openai/hooks/test_openai.py               | 2 +-
 tests/providers/trino/hooks/test_trino.py                 | 2 +-
 tests/sensors/test_external_task_sensor.py                | 4 ++--
 tests/utils/log/test_task_context_logger.py               | 2 +-
 tests/utils/test_logging_mixin.py                         | 2 +-
 tests/www/views/conftest.py                               | 4 ++--
 tests/www/views/test_views_base.py                        | 2 +-
 tests/www/views/test_views_cluster_activity.py            | 1 -
 tests/www/views/test_views_grid.py                        | 3 +--
 28 files changed, 29 insertions(+), 36 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index d8266b57c7..f119e7ed76 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1334,6 +1334,7 @@ extend-select = [
     "PT003",
     "PT009",
     "PT014", # Checks for duplicate test cases in pytest.mark.parametrize
+    "PT022",
     "PT023",
     "PT024",
     "PT025",
@@ -1351,6 +1352,11 @@ ignore = [
     "E731",
     "TCH003",  # Do not move imports from stdlib to TYPE_CHECKING block
 ]
+unfixable = [
+    # PT022 replace empty `yield` to empty `return`. Might be fixed with a combination of PLR1711
+    # In addition, it can't do anything with invalid typing annotations, protected by mypy.
+    "PT022",
+]
 
 [tool.ruff.format]
 docstring-code-format = true
diff --git a/tests/cli/commands/test_info_command.py 
b/tests/cli/commands/test_info_command.py
index 7203371484..44287bb0ec 100644
--- a/tests/cli/commands/test_info_command.py
+++ b/tests/cli/commands/test_info_command.py
@@ -174,7 +174,7 @@ class TestAirflowInfo:
 
 @pytest.fixture
 def setup_parser():
-    yield cli_parser.get_parser()
+    return cli_parser.get_parser()
 
 
 class TestInfoCommandMockHttpx:
diff --git a/tests/conftest.py b/tests/conftest.py
index 1847e1e909..f6fd2927aa 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -131,7 +131,6 @@ def reset_db():
     from airflow.utils import db
 
     db.resetdb()
-    yield
 
 
 ALLOWED_TRACE_SQL_COLUMNS = ["num", "time", "trace", "sql", "parameters", "count"]
diff --git a/tests/core/test_impersonation_tests.py 
b/tests/core/test_impersonation_tests.py
index 07a3d71d40..bc7d67503a 100644
--- a/tests/core/test_impersonation_tests.py
+++ b/tests/core/test_impersonation_tests.py
@@ -97,7 +97,6 @@ def check_original_docker_image():
             "and only allow to run the test there. This is done by checking /.dockerenv file "
             "(always present inside container) and checking for PYTHON_BASE_IMAGE variable."
         )
-    yield
 
 
 @pytest.fixture
@@ -229,7 +228,6 @@ class 
TestImpersonationWithCustomPythonPath(BaseImpersonationTest):
         monkeypatch.syspath_prepend(TEST_UTILS_FOLDER)
         self.dagbag = self.get_dagbag(TEST_DAG_CORRUPTED_FOLDER)
         monkeypatch.undo()
-        yield
 
     def test_impersonation_custom(self, monkeypatch):
         """
diff --git a/tests/jobs/test_local_task_job.py 
b/tests/jobs/test_local_task_job.py
index e621af36a7..d2188e83d1 100644
--- a/tests/jobs/test_local_task_job.py
+++ b/tests/jobs/test_local_task_job.py
@@ -69,7 +69,6 @@ def clear_db():
     db.clear_db_jobs()
     db.clear_db_runs()
     db.clear_db_task_fail()
-    yield
 
 
 @pytest.fixture(scope="class")
diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py
index 380dc0ca40..0599c9444f 100644
--- a/tests/models/test_dagbag.py
+++ b/tests/models/test_dagbag.py
@@ -25,7 +25,6 @@ import textwrap
 import zipfile
 from copy import deepcopy
 from datetime import datetime, timedelta, timezone
-from typing import Iterator
 from unittest import mock
 from unittest.mock import patch
 
@@ -267,11 +266,11 @@ class TestDagBag:
         return os.path.join(TEST_DAGS_FOLDER, "test_invalid_cron.py")
 
     @pytest.fixture
-    def invalid_cron_zipped_dag(self, invalid_cron_dag: str, tmp_path: pathlib.Path) -> Iterator[str]:
+    def invalid_cron_zipped_dag(self, invalid_cron_dag: str, tmp_path: pathlib.Path) -> str:
         zipped = tmp_path / "test_zip_invalid_cron.zip"
         with zipfile.ZipFile(zipped, "w") as zf:
             zf.write(invalid_cron_dag, os.path.basename(invalid_cron_dag))
-        yield os.fspath(zipped)
+        return os.fspath(zipped)
 
     @pytest.mark.parametrize("invalid_dag_name", ["invalid_cron_dag", 
"invalid_cron_zipped_dag"])
     def test_process_file_cron_validity_check(
@@ -386,14 +385,14 @@ class TestDagBag:
         assert [] == found
 
     @pytest.fixture
-    def zip_with_valid_dag_and_dup_tasks(self, tmp_path: pathlib.Path) -> Iterator[str]:
+    def zip_with_valid_dag_and_dup_tasks(self, tmp_path: pathlib.Path) -> str:
         failing_dag_file = TEST_DAGS_FOLDER / "test_invalid_dup_task.py"
         working_dag_file = TEST_DAGS_FOLDER / "test_example_bash_operator.py"
         zipped = tmp_path / "test_zip_invalid_dup_task.zip"
         with zipfile.ZipFile(zipped, "w") as zf:
             zf.write(failing_dag_file, failing_dag_file.name)
             zf.write(working_dag_file, working_dag_file.name)
-        yield os.fspath(zipped)
+        return os.fspath(zipped)
 
     def test_dag_registration_with_failure_zipped(self, 
zip_with_valid_dag_and_dup_tasks):
         dagbag = DagBag(dag_folder=os.devnull, include_examples=False)
diff --git a/tests/models/test_timestamp.py b/tests/models/test_timestamp.py
index bda4306498..e2f121bf1e 100644
--- a/tests/models/test_timestamp.py
+++ b/tests/models/test_timestamp.py
@@ -35,7 +35,6 @@ def clear_db():
     clear_db_logs()
     clear_db_runs()
     clear_db_dags()
-    yield
 
 
 def add_log(execdate, session, dag_maker, timezone_override=None):
diff --git a/tests/models/test_xcom_arg.py b/tests/models/test_xcom_arg.py
index b04e9cf6c0..2652a1032b 100644
--- a/tests/models/test_xcom_arg.py
+++ b/tests/models/test_xcom_arg.py
@@ -54,7 +54,6 @@ def build_python_op(dag_maker):
 def clear_db():
     clear_db_runs()
     clear_db_dags()
-    yield
 
 
 class TestXComArgBuild:
diff --git a/tests/operators/test_bash.py b/tests/operators/test_bash.py
index 7a4da18fe7..d00477b118 100644
--- a/tests/operators/test_bash.py
+++ b/tests/operators/test_bash.py
@@ -39,7 +39,7 @@ INTERVAL = timedelta(hours=12)
 
 @pytest.fixture
 def context():
-    yield {"ti": mock.Mock()}
+    return {"ti": mock.Mock()}
 
 
 class TestBashOperator:
diff --git a/tests/providers/amazon/aws/hooks/test_batch_waiters.py 
b/tests/providers/amazon/aws/hooks/test_batch_waiters.py
index 93bd77d0ee..72f2061b90 100644
--- a/tests/providers/amazon/aws/hooks/test_batch_waiters.py
+++ b/tests/providers/amazon/aws/hooks/test_batch_waiters.py
@@ -51,7 +51,6 @@ def patch_hook(monkeypatch, aws_region):
     """Patch hook object by dummy boto3 Batch client."""
     batch_client = boto3.client("batch", region_name=aws_region)
     monkeypatch.setattr(BatchWaitersHook, "conn", batch_client)
-    yield
 
 
 def test_batch_waiters(aws_region):
diff --git a/tests/providers/amazon/aws/hooks/test_ecr.py 
b/tests/providers/amazon/aws/hooks/test_ecr.py
index 730b6e11d4..cc598962f3 100644
--- a/tests/providers/amazon/aws/hooks/test_ecr.py
+++ b/tests/providers/amazon/aws/hooks/test_ecr.py
@@ -32,7 +32,6 @@ def patch_hook(monkeypatch):
     """Patch hook object by dummy boto3 ECR client."""
     ecr_client = boto3.client("ecr")
     monkeypatch.setattr(EcrHook, "conn", ecr_client)
-    yield
 
 
 @mock_aws
diff --git a/tests/providers/amazon/aws/operators/test_appflow.py 
b/tests/providers/amazon/aws/operators/test_appflow.py
index 3d53e99ea3..bc2456d0b2 100644
--- a/tests/providers/amazon/aws/operators/test_appflow.py
+++ b/tests/providers/amazon/aws/operators/test_appflow.py
@@ -60,7 +60,7 @@ def ctx(create_task_instance):
         task_id=TASK_ID,
         schedule="0 12 * * *",
     )
-    yield {"task_instance": ti}
+    return {"task_instance": ti}
 
 
 @pytest.fixture
diff --git a/tests/providers/amazon/aws/triggers/test_glue_databrew.py 
b/tests/providers/amazon/aws/triggers/test_glue_databrew.py
index 7352fffcd8..09137a0c7d 100644
--- a/tests/providers/amazon/aws/triggers/test_glue_databrew.py
+++ b/tests/providers/amazon/aws/triggers/test_glue_databrew.py
@@ -27,7 +27,7 @@ TEST_JOB_RUN_STATUS = "SUCCEEDED"
 
 @pytest.fixture
 def trigger():
-    yield GlueDataBrewJobCompleteTrigger(
+    return GlueDataBrewJobCompleteTrigger(
         aws_conn_id="aws_default", job_name=TEST_JOB_NAME, run_id=TEST_JOB_RUN_ID
     )
 
diff --git a/tests/providers/amazon/conftest.py 
b/tests/providers/amazon/conftest.py
index 85daf0a241..4ea7f429bc 100644
--- a/tests/providers/amazon/conftest.py
+++ b/tests/providers/amazon/conftest.py
@@ -102,4 +102,3 @@ def set_default_aws_settings(aws_testing_env_vars, 
monkeypatch):
             monkeypatch.delenv(env_name, raising=False)
     for env_name, value in aws_testing_env_vars.items():
         monkeypatch.setenv(env_name, value)
-    yield
diff --git a/tests/providers/cncf/kubernetes/operators/test_job.py 
b/tests/providers/cncf/kubernetes/operators/test_job.py
index 5f4efe3c96..3ec409fe66 100644
--- a/tests/providers/cncf/kubernetes/operators/test_job.py
+++ b/tests/providers/cncf/kubernetes/operators/test_job.py
@@ -331,7 +331,7 @@ class TestKubernetesJobOperator:
         tpl_file = tmp_path / "template.yaml"
         tpl_file.write_text(job_template_yaml)
 
-        yield tpl_file
+        return tpl_file
 
     @pytest.mark.parametrize(("randomize_name",), ([True], [False]))
     def test_job_template_file(self, randomize_name, job_template_file):
diff --git a/tests/providers/cncf/kubernetes/operators/test_pod.py 
b/tests/providers/cncf/kubernetes/operators/test_pod.py
index 0e9bda8602..5d914c8d60 100644
--- a/tests/providers/cncf/kubernetes/operators/test_pod.py
+++ b/tests/providers/cncf/kubernetes/operators/test_pod.py
@@ -76,7 +76,6 @@ def temp_override_attr(obj, attr, val):
 def clear_db():
     db.clear_db_dags()
     db.clear_db_runs()
-    yield
 
 
 def create_context(task, persist_to_db=False, map_index=None):
@@ -936,7 +935,7 @@ class TestKubernetesPodOperator:
         tpl_file = tmp_path / "template.yaml"
         tpl_file.write_text(pod_template_yaml)
 
-        yield tpl_file
+        return tpl_file
 
     @pytest.mark.parametrize(("randomize_name",), ([True], [False]))
     def test_pod_template_file(self, randomize_name, pod_template_file):
diff --git a/tests/providers/google/cloud/hooks/test_gcs.py 
b/tests/providers/google/cloud/hooks/test_gcs.py
index 1a0ce20030..7759b7de35 100644
--- a/tests/providers/google/cloud/hooks/test_gcs.py
+++ b/tests/providers/google/cloud/hooks/test_gcs.py
@@ -66,7 +66,7 @@ def testdata_string(testdata_bytes):
 def testdata_file(request, tmp_path_factory, testdata_bytes):
     fn = tmp_path_factory.mktemp(request.node.name) / "testfile_data"
     fn.write_bytes(testdata_bytes)
-    yield str(fn)
+    return str(fn)
 
 
 class TestGCSHookHelperFunctions:
diff --git a/tests/providers/google/cloud/log/test_gcs_task_handler.py 
b/tests/providers/google/cloud/log/test_gcs_task_handler.py
index a3e929b985..1344b2c797 100644
--- a/tests/providers/google/cloud/log/test_gcs_task_handler.py
+++ b/tests/providers/google/cloud/log/test_gcs_task_handler.py
@@ -58,7 +58,7 @@ class TestGCSTaskHandler:
             base_log_folder=local_log_location,
             gcs_log_folder="gs://bucket/remote/log/location",
         )
-        yield self.gcs_task_handler
+        return self.gcs_task_handler
 
     @mock.patch("airflow.providers.google.cloud.log.gcs_task_handler.GCSHook")
     @mock.patch("google.cloud.storage.Client")
diff --git a/tests/providers/google/cloud/sensors/test_bigquery.py 
b/tests/providers/google/cloud/sensors/test_bigquery.py
index 3eaffdd36e..36a331c78c 100644
--- a/tests/providers/google/cloud/sensors/test_bigquery.py
+++ b/tests/providers/google/cloud/sensors/test_bigquery.py
@@ -277,7 +277,7 @@ def context():
     Creates an empty context.
     """
     context = {}
-    yield context
+    return context
 
 
 class TestBigQueryTableExistenceAsyncSensor:
diff --git a/tests/providers/openai/hooks/test_openai.py 
b/tests/providers/openai/hooks/test_openai.py
index 9fed059a95..c3f17f7105 100644
--- a/tests/providers/openai/hooks/test_openai.py
+++ b/tests/providers/openai/hooks/test_openai.py
@@ -37,7 +37,7 @@ def mock_openai_connection():
         conn_type="openai",
     )
     os.environ[f"AIRFLOW_CONN_{conn.conn_id.upper()}"] = conn.get_uri()
-    yield conn
+    return conn
 
 
 @pytest.fixture
diff --git a/tests/providers/trino/hooks/test_trino.py 
b/tests/providers/trino/hooks/test_trino.py
index 7399e7e5b7..312ae5ec58 100644
--- a/tests/providers/trino/hooks/test_trino.py
+++ b/tests/providers/trino/hooks/test_trino.py
@@ -41,7 +41,7 @@ CERT_AUTHENTICATION = 
"airflow.providers.trino.hooks.trino.trino.auth.Certificat
 def jwt_token_file(tmp_path):
     jwt_file = tmp_path / "jwt.json"
     jwt_file.write_text('{"phony":"jwt"}')
-    yield jwt_file.__fspath__()
+    return jwt_file.__fspath__()
 
 
 class TestTrinoHookConn:
diff --git a/tests/sensors/test_external_task_sensor.py 
b/tests/sensors/test_external_task_sensor.py
index 0330098f22..9f43135452 100644
--- a/tests/sensors/test_external_task_sensor.py
+++ b/tests/sensors/test_external_task_sensor.py
@@ -101,7 +101,7 @@ def dag_zip_maker():
             os.unlink(self.__zip_file_name)
             os.rmdir(self.__tmp_dir)
 
-    yield DagZipMaker()
+    return DagZipMaker()
 
 
 class TestExternalTaskSensor:
@@ -1498,7 +1498,7 @@ def dag_bag_multiple():
         )
         begin >> task
 
-    yield dag_bag
+    return dag_bag
 
 
 def test_clear_multiple_external_task_marker(dag_bag_multiple):
diff --git a/tests/utils/log/test_task_context_logger.py 
b/tests/utils/log/test_task_context_logger.py
index a8754f4d0b..1171a40864 100644
--- a/tests/utils/log/test_task_context_logger.py
+++ b/tests/utils/log/test_task_context_logger.py
@@ -49,7 +49,7 @@ def ti(dag_maker):
 
     dr = dag.create_dagrun("running", run_id="abc")
     ti = dr.get_task_instances()[0]
-    yield ti
+    return ti
 
 
 def test_task_context_logger_enabled_by_default():
diff --git a/tests/utils/test_logging_mixin.py 
b/tests/utils/test_logging_mixin.py
index bcc7a40885..67d2f37ee8 100644
--- a/tests/utils/test_logging_mixin.py
+++ b/tests/utils/test_logging_mixin.py
@@ -38,7 +38,7 @@ def logger():
 
 @pytest.fixture
 def child_logger(logger):
-    yield logger.getChild("child")
+    return logger.getChild("child")
 
 
 @pytest.fixture
diff --git a/tests/www/views/conftest.py b/tests/www/views/conftest.py
index ae6015464d..821f541ef0 100644
--- a/tests/www/views/conftest.py
+++ b/tests/www/views/conftest.py
@@ -36,7 +36,7 @@ from tests.test_utils.www import client_with_login, 
client_without_login, client
 @pytest.fixture(autouse=True, scope="module")
 def session():
     settings.configure_orm()
-    yield settings.Session
+    return settings.Session
 
 
 @pytest.fixture(autouse=True, scope="module")
@@ -44,7 +44,7 @@ def examples_dag_bag(session):
     DagBag(include_examples=True).sync_to_db()
     dag_bag = DagBag(include_examples=True, read_dags_from_db=True)
     session.commit()
-    yield dag_bag
+    return dag_bag
 
 
 @pytest.fixture(scope="module")
diff --git a/tests/www/views/test_views_base.py 
b/tests/www/views/test_views_base.py
index 303e95d57f..63caa75f60 100644
--- a/tests/www/views/test_views_base.py
+++ b/tests/www/views/test_views_base.py
@@ -111,7 +111,7 @@ def heartbeat_not_running():
             Job.job_type == "SchedulerJob",
             Job.state == "running",
         ).delete()
-    yield "unhealthy", None
+    return "unhealthy", None
 
 
 @pytest.mark.parametrize(
diff --git a/tests/www/views/test_views_cluster_activity.py 
b/tests/www/views/test_views_cluster_activity.py
index 902ebb943b..a0d5bcf39f 100644
--- a/tests/www/views/test_views_cluster_activity.py
+++ b/tests/www/views/test_views_cluster_activity.py
@@ -47,7 +47,6 @@ def clean():
 @pytest.fixture
 def freeze_time_for_dagruns(time_machine):
     time_machine.move_to("2023-05-02T00:00:00+00:00", tick=False)
-    yield
 
 
 @pytest.fixture
diff --git a/tests/www/views/test_views_grid.py 
b/tests/www/views/test_views_grid.py
index 0e6e92b23c..2d5c88b5af 100644
--- a/tests/www/views/test_views_grid.py
+++ b/tests/www/views/test_views_grid.py
@@ -96,7 +96,7 @@ def dag_with_runs(dag_without_runs):
         execution_date=dag_without_runs.dag.next_dagrun_info(date).logical_date,
     )
 
-    yield run_1, run_2
+    return run_1, run_2
 
 
 def test_no_runs(admin_client, dag_without_runs):
@@ -187,7 +187,6 @@ def 
test_grid_data_filtered_on_run_type_and_run_state(admin_client, dag_with_run
 @pytest.fixture
 def freeze_time_for_dagruns(time_machine):
     time_machine.move_to("2022-01-02T00:00:00+00:00", tick=False)
-    yield
 
 
 @pytest.mark.usefixtures("freeze_time_for_dagruns")

Reply via email to