This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new c531e387b2 Add running provider tests against Airflow 2.7 (#39862)
c531e387b2 is described below

commit c531e387b211c3a32ea55c4938bfb899573dad9c
Author: Jarek Potiuk <[email protected]>
AuthorDate: Tue May 28 19:34:23 2024 +0200

    Add running provider tests against Airflow 2.7 (#39862)
---
 dev/breeze/src/airflow_breeze/global_constants.py  |  4 +-
 .../endpoints/test_extra_link_endpoint.py          |  2 +-
 .../endpoints/test_plugin_endpoint.py              |  2 +-
 tests/api_connexion/schemas/test_plugin_schema.py  |  2 +-
 tests/conftest.py                                  |  5 ++-
 .../aws/auth_manager/cli/test_avp_commands.py      |  3 ++
 .../aws/auth_manager/cli/test_idc_commands.py      |  3 ++
 .../amazon/aws/auth_manager/views/test_auth.py     |  4 ++
 tests/providers/atlassian/jira/hooks/test_jira.py  | 41 ++++++++++---------
 .../atlassian/jira/operators/test_jira.py          | 17 ++++----
 .../providers/atlassian/jira/sensors/test_jira.py  | 21 +++++-----
 .../kubernetes/operators/test_spark_kubernetes.py  | 12 +++++-
 tests/providers/common/sql/hooks/test_dbapi.py     |  5 +++
 tests/providers/common/sql/hooks/test_sql.py       |  5 +++
 tests/providers/common/sql/hooks/test_sqlparse.py  |  5 +++
 tests/providers/common/sql/operators/test_sql.py   |  6 ++-
 .../common/sql/operators/test_sql_execute.py       |  5 +++
 tests/providers/common/sql/sensors/test_sql.py     |  5 +++
 tests/providers/common/sql/test_utils.py           |  8 ++++
 tests/providers/docker/hooks/test_docker.py        |  2 +
 .../test_role_and_permission_endpoint.py           |  2 -
 tests/providers/mongo/hooks/test_mongo.py          |  3 +-
 .../openlineage/utils/test_selective_enable.py     |  6 ++-
 tests/serialization/test_dag_serialization.py      |  2 +-
 tests/test_utils/compat.py                         | 46 +++++++++++++++++++++-
 tests/test_utils/mock_operators.py                 |  2 +-
 tests/www/views/test_views_extra_links.py          |  5 ++-
 27 files changed, 170 insertions(+), 53 deletions(-)

diff --git a/dev/breeze/src/airflow_breeze/global_constants.py 
b/dev/breeze/src/airflow_breeze/global_constants.py
index c2658168b6..2e06517998 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -488,9 +488,9 @@ CHICKEN_EGG_PROVIDERS = " ".join([])
 BASE_PROVIDERS_COMPATIBILITY_CHECKS: list[dict[str, str | list[str]]] = [
     {
         "python-version": "3.8",
-        "airflow-version": "2.7.1",
+        "airflow-version": "2.7.3",
         "remove-providers": "common.io fab",
-        "run-tests": "false",
+        "run-tests": "true",
     },
     {
         "python-version": "3.8",
diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py 
b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
index 3e803a4bf4..f6590b6d5a 100644
--- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py
+++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
@@ -22,7 +22,6 @@ from urllib.parse import quote_plus
 import pytest
 
 from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.models.dag import DAG
 from airflow.models.dagbag import DagBag
 from airflow.models.xcom import XCom
@@ -34,6 +33,7 @@ from airflow.utils import timezone
 from airflow.utils.state import DagRunState
 from airflow.utils.types import DagRunType
 from tests.test_utils.api_connexion_utils import create_user, delete_user
+from tests.test_utils.compat import BaseOperatorLink
 from tests.test_utils.db import clear_db_runs, clear_db_xcom
 from tests.test_utils.mock_plugins import mock_plugin_manager
 
diff --git a/tests/api_connexion/endpoints/test_plugin_endpoint.py 
b/tests/api_connexion/endpoints/test_plugin_endpoint.py
index f56d04a764..0206c1ff0f 100644
--- a/tests/api_connexion/endpoints/test_plugin_endpoint.py
+++ b/tests/api_connexion/endpoints/test_plugin_endpoint.py
@@ -23,13 +23,13 @@ from flask import Blueprint
 from flask_appbuilder import BaseView
 
 from airflow.hooks.base import BaseHook
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.plugins_manager import AirflowPlugin
 from airflow.security import permissions
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
 from airflow.timetables.base import Timetable
 from airflow.utils.module_loading import qualname
 from tests.test_utils.api_connexion_utils import assert_401, create_user, 
delete_user
+from tests.test_utils.compat import BaseOperatorLink
 from tests.test_utils.config import conf_vars
 from tests.test_utils.mock_plugins import mock_plugin_manager
 
diff --git a/tests/api_connexion/schemas/test_plugin_schema.py 
b/tests/api_connexion/schemas/test_plugin_schema.py
index a7560ea6b7..21c002d595 100644
--- a/tests/api_connexion/schemas/test_plugin_schema.py
+++ b/tests/api_connexion/schemas/test_plugin_schema.py
@@ -25,8 +25,8 @@ from airflow.api_connexion.schemas.plugin_schema import (
     plugin_schema,
 )
 from airflow.hooks.base import BaseHook
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.plugins_manager import AirflowPlugin
+from tests.test_utils.compat import BaseOperatorLink
 
 
 class PluginHook(BaseHook): ...
diff --git a/tests/conftest.py b/tests/conftest.py
index d4a3a727b0..65fb0d9925 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -334,7 +334,7 @@ def initial_db_init():
     from airflow.utils import db
     from airflow.www.extensions.init_appbuilder import init_appbuilder
     from airflow.www.extensions.init_auth_manager import get_auth_manager
-    from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
+    from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_10_PLUS
 
     if AIRFLOW_V_2_10_PLUS:
         db.resetdb(use_migration_files=True)
@@ -345,7 +345,8 @@ def initial_db_init():
     flask_app = Flask(__name__)
     flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", 
"SQL_ALCHEMY_CONN")
     init_appbuilder(flask_app)
-    get_auth_manager().init()
+    if AIRFLOW_V_2_8_PLUS:
+        get_auth_manager().init()
 
 
 @pytest.fixture(autouse=True, scope="session")
diff --git a/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py 
b/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py
index c9bf7ffe2f..464b42993b 100644
--- a/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py
+++ b/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py
@@ -23,10 +23,13 @@ import pytest
 
 from airflow.cli import cli_parser
 from airflow.providers.amazon.aws.auth_manager.cli.avp_commands import 
init_avp, update_schema
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 from tests.test_utils.config import conf_vars
 
 mock_boto3 = Mock()
 
+pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires 
Airflow 2.8+")
+
 
 @pytest.mark.db_test
 class TestAvpCommands:
diff --git a/tests/providers/amazon/aws/auth_manager/cli/test_idc_commands.py 
b/tests/providers/amazon/aws/auth_manager/cli/test_idc_commands.py
index 9913e1ae7c..432bd0eac2 100644
--- a/tests/providers/amazon/aws/auth_manager/cli/test_idc_commands.py
+++ b/tests/providers/amazon/aws/auth_manager/cli/test_idc_commands.py
@@ -23,10 +23,13 @@ import pytest
 
 from airflow.cli import cli_parser
 from airflow.providers.amazon.aws.auth_manager.cli.idc_commands import init_idc
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 from tests.test_utils.config import conf_vars
 
 mock_boto3 = Mock()
 
+pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires 
Airflow 2.8+")
+
 
 @pytest.mark.db_test
 class TestIdcCommands:
diff --git a/tests/providers/amazon/aws/auth_manager/views/test_auth.py 
b/tests/providers/amazon/aws/auth_manager/views/test_auth.py
index 85ef6aafe6..7474d74727 100644
--- a/tests/providers/amazon/aws/auth_manager/views/test_auth.py
+++ b/tests/providers/amazon/aws/auth_manager/views/test_auth.py
@@ -23,10 +23,14 @@ from flask import session, url_for
 
 from airflow.exceptions import AirflowException
 from airflow.www import app as application
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 from tests.test_utils.config import conf_vars
 
 pytest.importorskip("onelogin")
 
+pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires 
Airflow 2.8+")
+
+
 SAML_METADATA_URL = "/saml/metadata"
 SAML_METADATA_PARSED = {
     "idp": {
diff --git a/tests/providers/atlassian/jira/hooks/test_jira.py 
b/tests/providers/atlassian/jira/hooks/test_jira.py
index 98fdc824c4..339274e317 100644
--- a/tests/providers/atlassian/jira/hooks/test_jira.py
+++ b/tests/providers/atlassian/jira/hooks/test_jira.py
@@ -24,6 +24,7 @@ import pytest
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.hooks.jira import JiraHook
+from tests.test_utils.compat import connection_as_json
 
 
 @pytest.fixture
@@ -46,27 +47,31 @@ class TestJiraHook:
 
         monkeypatch.setenv(
             f"AIRFLOW_CONN_{self.conn_id}".upper(),
-            Connection(
-                conn_id="jira_default",
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                login="user",
-                password="password",
-                extra='{"verify": false, "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id="jira_default",
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    login="user",
+                    password="password",
+                    extra='{"verify": false, "project": "AIRFLOW"}',
+                )
+            ),
         )
         monkeypatch.setenv(
             f"AIRFLOW_CONN_{self.conn_id_with_str_verify}".upper(),
-            Connection(
-                conn_id=self.conn_id_with_str_verify,
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                login="user",
-                password="password",
-                extra='{"verify": "False", "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id=self.conn_id_with_str_verify,
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    login="user",
+                    password="password",
+                    extra='{"verify": "False", "project": "AIRFLOW"}',
+                )
+            ),
         )
 
     def test_jira_client_connection(self, mocked_jira_client):
diff --git a/tests/providers/atlassian/jira/operators/test_jira.py 
b/tests/providers/atlassian/jira/operators/test_jira.py
index e49fba12ae..9282089bae 100644
--- a/tests/providers/atlassian/jira/operators/test_jira.py
+++ b/tests/providers/atlassian/jira/operators/test_jira.py
@@ -24,6 +24,7 @@ import pytest
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.operators.jira import JiraOperator
 from airflow.utils import timezone
+from tests.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
@@ -49,13 +50,15 @@ class TestJiraOperator:
     def setup_test_cases(self, monkeypatch):
         monkeypatch.setenv(
             "AIRFLOW_CONN_JIRA_DEFAULT",
-            Connection(
-                conn_id="jira_default",
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                extra='{"verify": false, "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id="jira_default",
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    extra='{"verify": false, "project": "AIRFLOW"}',
+                )
+            ),
         )
         with mock.patch("airflow.models.baseoperator.BaseOperator.xcom_push", 
return_value=None) as m:
             self.mocked_xcom_push = m
diff --git a/tests/providers/atlassian/jira/sensors/test_jira.py 
b/tests/providers/atlassian/jira/sensors/test_jira.py
index 0da2e73209..5e2ce896b6 100644
--- a/tests/providers/atlassian/jira/sensors/test_jira.py
+++ b/tests/providers/atlassian/jira/sensors/test_jira.py
@@ -24,6 +24,7 @@ import pytest
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.sensors.jira import JiraTicketSensor
 from airflow.utils import timezone
+from tests.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
@@ -49,15 +50,17 @@ class TestJiraSensor:
     def setup_test_cases(self, monkeypatch):
         monkeypatch.setenv(
             "AIRFLOW_CONN_JIRA_DEFAULT".upper(),
-            Connection(
-                conn_id="jira_default",
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                login="user",
-                password="password",
-                extra='{"verify": false, "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id="jira_default",
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    login="user",
+                    password="password",
+                    extra='{"verify": false, "project": "AIRFLOW"}',
+                )
+            ),
         )
 
     def test_issue_label_set(self, mocked_jira_client):
diff --git a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py 
b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
index eefb634759..2f9f7d3f2c 100644
--- a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
+++ b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
@@ -31,9 +31,9 @@ from kubernetes.client import models as k8s
 from airflow import DAG
 from airflow.models import Connection, DagRun, TaskInstance
 from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import 
SparkKubernetesOperator
-from airflow.template.templater import LiteralValue
 from airflow.utils import db, timezone
 from airflow.utils.types import DagRunType
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 
 
 
@patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook")
@@ -624,12 +624,17 @@ def 
test_resolve_application_file_template_non_dictionary(dag_maker, tmp_path, b
 @pytest.mark.parametrize(
     "use_literal_value", [pytest.param(True, id="literal-value"), 
pytest.param(False, id="whitespace-compat")]
 )
[email protected](
+    not AIRFLOW_V_2_8_PLUS, reason="Skipping tests that require LiteralValue 
for Airflow < 2.8.0"
+)
 def test_resolve_application_file_real_file(create_task_instance_of_operator, 
tmp_path, use_literal_value):
     application_file = tmp_path / "test-application-file.yml"
     application_file.write_text("foo: bar\nspam: egg")
 
     application_file = application_file.resolve().as_posix()
     if use_literal_value:
+        from airflow.template.templater import LiteralValue
+
         application_file = LiteralValue(application_file)
     else:
         # Prior Airflow 2.8 workaround was adding whitespace at the end of the 
filepath
@@ -649,8 +654,13 @@ def 
test_resolve_application_file_real_file(create_task_instance_of_operator, tm
 
 
 @pytest.mark.db_test
[email protected](
+    not AIRFLOW_V_2_8_PLUS, reason="Skipping tests that require LiteralValue 
for Airflow < 2.8.0"
+)
 def 
test_resolve_application_file_real_file_not_exists(create_task_instance_of_operator,
 tmp_path):
     application_file = (tmp_path / 
"test-application-file.yml").resolve().as_posix()
+    from airflow.template.templater import LiteralValue
+
     ti = create_task_instance_of_operator(
         SparkKubernetesOperator,
         application_file=LiteralValue(application_file),
diff --git a/tests/providers/common/sql/hooks/test_dbapi.py 
b/tests/providers/common/sql/hooks/test_dbapi.py
index 86b7b76563..a7c33b5549 100644
--- a/tests/providers/common/sql/hooks/test_dbapi.py
+++ b/tests/providers/common/sql/hooks/test_dbapi.py
@@ -27,6 +27,11 @@ from pyodbc import Cursor
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, 
fetch_all_handler, fetch_one_handler
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 
2.8.0+ only"),
+]
 
 
 class DbApiHookInProvider(DbApiHook):
diff --git a/tests/providers/common/sql/hooks/test_sql.py 
b/tests/providers/common/sql/hooks/test_sql.py
index 4bd5bdcc54..5d3f4acfb1 100644
--- a/tests/providers/common/sql/hooks/test_sql.py
+++ b/tests/providers/common/sql/hooks/test_sql.py
@@ -28,6 +28,11 @@ from airflow.models import Connection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler
 from airflow.utils.session import provide_session
 from tests.providers.common.sql.test_utils import mock_hook
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 
2.8.0+ only"),
+]
 
 TASK_ID = "sql-operator"
 HOST = "host"
diff --git a/tests/providers/common/sql/hooks/test_sqlparse.py 
b/tests/providers/common/sql/hooks/test_sqlparse.py
index 72306287a4..1ce7cb5d21 100644
--- a/tests/providers/common/sql/hooks/test_sqlparse.py
+++ b/tests/providers/common/sql/hooks/test_sqlparse.py
@@ -19,6 +19,11 @@ from __future__ import annotations
 import pytest
 
 from airflow.providers.common.sql.hooks.sql import DbApiHook
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 
2.8.0+ only"),
+]
 
 
 @pytest.mark.parametrize(
diff --git a/tests/providers/common/sql/operators/test_sql.py 
b/tests/providers/common/sql/operators/test_sql.py
index 2df573118a..85d26c75ae 100644
--- a/tests/providers/common/sql/operators/test_sql.py
+++ b/tests/providers/common/sql/operators/test_sql.py
@@ -42,8 +42,12 @@ from airflow.providers.postgres.hooks.postgres import 
PostgresHook
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import State
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 
-pytestmark = pytest.mark.db_test
+pytestmark = [
+    pytest.mark.db_test,
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 
2.8.0+ only"),
+]
 
 
 class MockHook:
diff --git a/tests/providers/common/sql/operators/test_sql_execute.py 
b/tests/providers/common/sql/operators/test_sql_execute.py
index 565cc44c3f..0ba52abba9 100644
--- a/tests/providers/common/sql/operators/test_sql_execute.py
+++ b/tests/providers/common/sql/operators/test_sql_execute.py
@@ -29,6 +29,11 @@ from airflow.models import Connection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.openlineage.extractors.base import OperatorLineage
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 
2.8.0+ only"),
+]
 
 DATE = "2017-04-20"
 TASK_ID = "sql-operator"
diff --git a/tests/providers/common/sql/sensors/test_sql.py 
b/tests/providers/common/sql/sensors/test_sql.py
index fc2d83895e..81f616002a 100644
--- a/tests/providers/common/sql/sensors/test_sql.py
+++ b/tests/providers/common/sql/sensors/test_sql.py
@@ -26,6 +26,11 @@ from airflow.models.dag import DAG
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.common.sql.sensors.sql import SqlSensor
 from airflow.utils.timezone import datetime
+from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"),
+]
 
 DEFAULT_DATE = datetime(2015, 1, 1)
 TEST_DAG_ID = "unit_test_sql_dag"
diff --git a/tests/providers/common/sql/test_utils.py 
b/tests/providers/common/sql/test_utils.py
index c4a7e21fbb..3f5255f8bd 100644
--- a/tests/providers/common/sql/test_utils.py
+++ b/tests/providers/common/sql/test_utils.py
@@ -20,7 +20,15 @@ from __future__ import annotations
 from typing import TYPE_CHECKING
 from unittest import mock
 
+import pytest
+
 from airflow.models import Connection
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 
2.8.0+ only"),
+]
+
 
 if TYPE_CHECKING:
     from airflow.hooks.base import BaseHook
diff --git a/tests/providers/docker/hooks/test_docker.py 
b/tests/providers/docker/hooks/test_docker.py
index 6eb177a281..05ff5ed582 100644
--- a/tests/providers/docker/hooks/test_docker.py
+++ b/tests/providers/docker/hooks/test_docker.py
@@ -42,6 +42,7 @@ TEST_CONN = {"host": "some.docker.registry.com", "login": 
"some_user", "password
 MOCK_CONNECTION_NOT_EXIST_MSG = "Testing connection not exists"
 MOCK_CONNECTION_NOT_EXISTS_EX = 
AirflowNotFoundException(MOCK_CONNECTION_NOT_EXIST_MSG)
 HOOK_LOGGER_NAME = 
"airflow.task.hooks.airflow.providers.docker.hooks.docker.DockerHook"
+AIRFLOW_V_2_7_HOOK_LOGGER_NAME = "airflow.providers.docker.hooks.docker"
 
 
 @pytest.fixture
@@ -107,6 +108,7 @@ def test_create_api_client(conn_id, hook_conn, 
docker_api_client_patcher, caplog
         - If `docker_conn_id` not provided that hook doesn't try access to 
Airflow Connections.
     """
     caplog.set_level(logging.DEBUG, logger=HOOK_LOGGER_NAME)
+    caplog.set_level(logging.DEBUG, logger=AIRFLOW_V_2_7_HOOK_LOGGER_NAME)
     hook = DockerHook(
         docker_conn_id=conn_id, base_url=TEST_TLS_BASE_URL, 
version=TEST_VERSION, tls=True, timeout=42
     )
diff --git 
a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
 
b/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
index a91a434412..454fce467c 100644
--- 
a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
+++ 
b/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
@@ -35,8 +35,6 @@ from tests.test_utils.api_connexion_utils import (
     delete_user,
 )
 
-pytestmark = pytest.mark.db_test
-
 
 @pytest.fixture(scope="module")
 def configured_app(minimal_app_for_auth_api):
diff --git a/tests/providers/mongo/hooks/test_mongo.py 
b/tests/providers/mongo/hooks/test_mongo.py
index d546c93e9d..27dd3ce535 100644
--- a/tests/providers/mongo/hooks/test_mongo.py
+++ b/tests/providers/mongo/hooks/test_mongo.py
@@ -27,6 +27,7 @@ import pytest
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import Connection
 from airflow.providers.mongo.hooks.mongo import MongoHook
+from tests.test_utils.compat import connection_as_json
 
 pytestmark = pytest.mark.db_test
 
@@ -59,7 +60,7 @@ def mongo_connections():
 
     with pytest.MonkeyPatch.context() as mp:
         for conn in connections:
-            mp.setenv(f"AIRFLOW_CONN_{conn.conn_id.upper()}", conn.as_json())
+            mp.setenv(f"AIRFLOW_CONN_{conn.conn_id.upper()}", 
connection_as_json(conn))
         yield
 
 
diff --git a/tests/providers/openlineage/utils/test_selective_enable.py 
b/tests/providers/openlineage/utils/test_selective_enable.py
index a177181489..d44839c5b9 100644
--- a/tests/providers/openlineage/utils/test_selective_enable.py
+++ b/tests/providers/openlineage/utils/test_selective_enable.py
@@ -17,6 +17,8 @@
 # under the License.
 from __future__ import annotations
 
+from pendulum import now
+
 from airflow.decorators import dag, task
 from airflow.models import DAG
 from airflow.operators.empty import EmptyOperator
@@ -31,7 +33,7 @@ from airflow.providers.openlineage.utils.selective_enable 
import (
 
 class TestOpenLineageSelectiveEnable:
     def setup_method(self):
-        @dag(dag_id="test_selective_enable_decorated_dag")
+        @dag(dag_id="test_selective_enable_decorated_dag", start_date=now())
         def decorated_dag():
             @task
             def decorated_task():
@@ -41,7 +43,7 @@ class TestOpenLineageSelectiveEnable:
 
         self.decorated_dag = decorated_dag()
 
-        with DAG(dag_id="test_selective_enable_dag") as self.dag:
+        with DAG(dag_id="test_selective_enable_dag", start_date=now()) as 
self.dag:
             self.task = EmptyOperator(task_id="test_selective_enable")
 
     def test_enable_lineage_task_level(self):
diff --git a/tests/serialization/test_dag_serialization.py 
b/tests/serialization/test_dag_serialization.py
index 0645995a89..fbefe71d28 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -48,7 +48,6 @@ from airflow.decorators.base import DecoratedOperator
 from airflow.exceptions import AirflowException, SerializationError
 from airflow.hooks.base import BaseHook
 from airflow.models.baseoperator import BaseOperator
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.models.connection import Connection
 from airflow.models.dag import DAG
 from airflow.models.dagbag import DagBag
@@ -78,6 +77,7 @@ from airflow.utils import timezone
 from airflow.utils.operator_resources import Resources
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.xcom import XCOM_RETURN_KEY
+from tests.test_utils.compat import BaseOperatorLink
 from tests.test_utils.config import conf_vars
 from tests.test_utils.mock_operators import AirflowLink2, CustomOperator, 
GoogleLink, MockOperator
 from tests.test_utils.timetables import CustomSerializationTimetable, 
cron_timetable, delta_timetable
diff --git a/tests/test_utils/compat.py b/tests/test_utils/compat.py
index 88d9e08913..b5e876a626 100644
--- a/tests/test_utils/compat.py
+++ b/tests/test_utils/compat.py
@@ -47,6 +47,12 @@ AIRFLOW_V_2_8_PLUS = Version(AIRFLOW_VERSION.base_version) 
>= Version("2.8.0")
 AIRFLOW_V_2_9_PLUS = Version(AIRFLOW_VERSION.base_version) >= Version("2.9.0")
 AIRFLOW_V_2_10_PLUS = Version(AIRFLOW_VERSION.base_version) >= 
Version("2.10.0")
 
+try:
+    from airflow.models.baseoperatorlink import BaseOperatorLink
+except ImportError:
+    # Compatibility for Airflow 2.7.*
+    from airflow.models.baseoperator import BaseOperatorLink
+
 
 def deserialize_operator(serialized_operator: dict[str, Any]) -> Operator:
     if AIRFLOW_V_2_10_PLUS:
@@ -66,6 +72,45 @@ def deserialize_operator(serialized_operator: dict[str, 
Any]) -> Operator:
         return SerializedBaseOperator.deserialize_operator(serialized_operator)
 
 
+def connection_to_dict(
+    connection: Connection, *, prune_empty: bool = False, validate: bool = True
+) -> dict[str, Any]:
+    """
+    Convert Connection to json-serializable dictionary (compatibility code for 
Airflow 2.7 tests)
+
+    :param connection: connection to convert to dict
+    :param prune_empty: Whether or not remove empty values.
+    :param validate: Validate dictionary is JSON-serializable
+
+    :meta private:
+    """
+    conn = {
+        "conn_id": connection.conn_id,
+        "conn_type": connection.conn_type,
+        "description": connection.description,
+        "host": connection.host,
+        "login": connection.login,
+        "password": connection.password,
+        "schema": connection.schema,
+        "port": connection.port,
+    }
+    if prune_empty:
+        conn = prune_dict(val=conn, mode="strict")
+    if (extra := connection.extra_dejson) or not prune_empty:
+        conn["extra"] = extra
+
+    if validate:
+        json.dumps(conn)
+    return conn
+
+
+def connection_as_json(connection: Connection) -> str:
+    """Convert Connection to JSON-string object (compatibility code for 
Airflow 2.7 tests)."""
+    conn_repr = connection_to_dict(connection, prune_empty=True, 
validate=False)
+    conn_repr.pop("conn_id", None)
+    return json.dumps(conn_repr)
+
+
 @contextlib.contextmanager
 def ignore_provider_compatibility_error(minimum_version: str, module_name: 
str):
     """
@@ -73,7 +118,6 @@ def ignore_provider_compatibility_error(minimum_version: 
str, module_name: str):
 
     :param minimum_version: The version string that should be in the error 
message.
     :param module_name: The name of the module that is being tested.
-    :param include_import_errors: Whether to include ImportError in the list 
of errors to ignore.
     """
     import pytest
 
diff --git a/tests/test_utils/mock_operators.py 
b/tests/test_utils/mock_operators.py
index e9a05c9edb..f4ddef666c 100644
--- a/tests/test_utils/mock_operators.py
+++ b/tests/test_utils/mock_operators.py
@@ -22,8 +22,8 @@ from typing import TYPE_CHECKING, Any, Sequence
 import attr
 
 from airflow.models.baseoperator import BaseOperator
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.models.xcom import XCom
+from tests.test_utils.compat import BaseOperatorLink
 
 if TYPE_CHECKING:
     import jinja2
diff --git a/tests/www/views/test_views_extra_links.py 
b/tests/www/views/test_views_extra_links.py
index a37e9f32d8..d5b70caba5 100644
--- a/tests/www/views/test_views_extra_links.py
+++ b/tests/www/views/test_views_extra_links.py
@@ -24,11 +24,11 @@ from unittest import mock
 import pytest
 
 from airflow.models.baseoperator import BaseOperator
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.models.dag import DAG
 from airflow.utils import timezone
 from airflow.utils.state import DagRunState
 from airflow.utils.types import DagRunType
+from tests.test_utils.compat import BaseOperatorLink
 from tests.test_utils.db import clear_db_runs
 from tests.test_utils.mock_operators import AirflowLink, Dummy2TestOperator, 
Dummy3TestOperator
 
@@ -62,7 +62,8 @@ class FooBarLink(BaseOperatorLink):
 
 
 class DummyTestOperator(BaseOperator):
-    operator_extra_links = (
+    # We need to ignore type check here due to 2.7 compatibility import
+    operator_extra_links = (  # type: ignore[assignment]
         RaiseErrorLink(),
         NoResponseLink(),
         FooBarLink(),

Reply via email to